Compare commits
99 Commits
0ea8efffcd
...
master
Author | SHA1 | Date | |
---|---|---|---|
da60952fe4 | |||
d6ea3f1853 | |||
adb7bf6795 | |||
215db1682d | |||
36f2eecba1 | |||
1f588e90bc | |||
b534a9bccf | |||
e5e78a0527 | |||
d98de5aff0 | |||
ffaf43e56f | |||
20d303e79a | |||
117627889f | |||
ab9b1009cb | |||
6605fe0866 | |||
782981c6f8 | |||
aef611f731 | |||
c6e93b353d | |||
e112d0ef8c | |||
387ef3bbbe | |||
31f67e3aad | |||
795585260a | |||
6c08ba37c6 | |||
e6b8f35906 | |||
692d115d5c | |||
0042adba00 | |||
96cce1634b | |||
40ab99bf87 | |||
c460559b34 | |||
9e9ec037b8 | |||
71a2fe1beb | |||
0436224728 | |||
e5dcc062de | |||
bb6777f389 | |||
467e98b01a | |||
161967ebac | |||
3026e30783 | |||
494228b367 | |||
024a056d9e | |||
0f4b7ac7a5 | |||
75b31844a0 | |||
19c38255a2 | |||
9fc23aa0b2 | |||
5c8cf16348 | |||
5645fba3e1 | |||
79dc452799 | |||
de2b657ec1 | |||
9ef85a53e6 | |||
1995283c73 | |||
d62fc4a8a3 | |||
e0d39b389d | |||
4bc93e5546 | |||
299c15e30d | |||
448401b7b0 | |||
94a802c1e6 | |||
2ab821e182 | |||
ccfaaac36d | |||
8b09465349 | |||
c871116f9f | |||
0daecee302 | |||
7fb2be86ee | |||
c8e6465ee9 | |||
1e9e1136f0 | |||
7e2cce9c72 | |||
b033946444 | |||
3de179d613 | |||
a7a8290f66 | |||
0c167ec6ab | |||
63cbe035ad | |||
8297cf976e | |||
fb26d472f2 | |||
bfbc829f91 | |||
fb835f4f40 | |||
b6da0efa29 | |||
dab22b8c96 | |||
f2c8b15b84 | |||
8bf8ae3def | |||
1d929f5fb3 | |||
0971286823 | |||
deee35fc32 | |||
b830f84ebe | |||
b64dc5f595 | |||
03f11d84ce | |||
10d4d8f718 | |||
a065550d50 | |||
87d5a5bf04 | |||
71d68a47ed | |||
687b5bf422 | |||
b20e3abd1f | |||
e265dc3853 | |||
7e971bc330 | |||
ae78b026e6 | |||
c9a8c0707e | |||
7e960f3fce | |||
7bbc347dbf | |||
c69ae6f05b | |||
aed1a8aafa | |||
945eb67104 | |||
d36e30638c | |||
4c88acc64b |
237
.gitignore
vendored
Normal file
237
.gitignore
vendored
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
# ---> JetBrains
|
||||||
|
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
||||||
|
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
.python-version
|
||||||
|
|
||||||
|
# User-specific stuff
|
||||||
|
.idea/**/workspace.xml
|
||||||
|
.idea/**/tasks.xml
|
||||||
|
.idea/**/usage.statistics.xml
|
||||||
|
.idea/**/dictionaries
|
||||||
|
.idea/**/shelf
|
||||||
|
|
||||||
|
# AWS User-specific
|
||||||
|
.idea/**/aws.xml
|
||||||
|
|
||||||
|
# Generated files
|
||||||
|
.idea/**/contentModel.xml
|
||||||
|
|
||||||
|
# Sensitive or high-churn files
|
||||||
|
.idea/**/dataSources/
|
||||||
|
.idea/**/dataSources.ids
|
||||||
|
.idea/**/dataSources.local.xml
|
||||||
|
.idea/**/sqlDataSources.xml
|
||||||
|
.idea/**/dynamic.xml
|
||||||
|
.idea/**/uiDesigner.xml
|
||||||
|
.idea/**/dbnavigator.xml
|
||||||
|
|
||||||
|
# Gradle
|
||||||
|
.idea/**/gradle.xml
|
||||||
|
.idea/**/libraries
|
||||||
|
|
||||||
|
# Gradle and Maven with auto-import
|
||||||
|
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||||
|
# since they will be recreated, and may cause churn. Uncomment if using
|
||||||
|
# auto-import.
|
||||||
|
# .idea/artifacts
|
||||||
|
# .idea/compiler.xml
|
||||||
|
# .idea/jarRepositories.xml
|
||||||
|
# .idea/modules.xml
|
||||||
|
# .idea/*.iml
|
||||||
|
# .idea/modules
|
||||||
|
# *.iml
|
||||||
|
# *.ipr
|
||||||
|
|
||||||
|
# CMake
|
||||||
|
cmake-build-*/
|
||||||
|
|
||||||
|
# Mongo Explorer plugin
|
||||||
|
.idea/**/mongoSettings.xml
|
||||||
|
|
||||||
|
# File-based project format
|
||||||
|
*.iws
|
||||||
|
|
||||||
|
# IntelliJ
|
||||||
|
out/
|
||||||
|
|
||||||
|
# mpeltonen/sbt-idea plugin
|
||||||
|
.idea_modules/
|
||||||
|
|
||||||
|
# JIRA plugin
|
||||||
|
atlassian-ide-plugin.xml
|
||||||
|
|
||||||
|
# Cursive Clojure plugin
|
||||||
|
.idea/replstate.xml
|
||||||
|
|
||||||
|
# SonarLint plugin
|
||||||
|
.idea/sonarlint/
|
||||||
|
|
||||||
|
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||||
|
com_crashlytics_export_strings.xml
|
||||||
|
crashlytics.properties
|
||||||
|
crashlytics-build.properties
|
||||||
|
fabric.properties
|
||||||
|
|
||||||
|
# Editor-based Rest Client
|
||||||
|
.idea/httpRequests
|
||||||
|
|
||||||
|
# Android studio 3.1+ serialized cache file
|
||||||
|
.idea/caches/build_file_checksums.ser
|
||||||
|
|
||||||
|
# ---> JetBrainsWorkspace
|
||||||
|
# Additional coverage for JetBrains IDEs workspace files
|
||||||
|
.idea/deployment.xml
|
||||||
|
.idea/misc.xml
|
||||||
|
.idea/remote-mappings.xml
|
||||||
|
.idea/*.iml
|
||||||
|
|
||||||
|
# ---> Python
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
db.sqlite3
|
||||||
|
db.sqlite3-journal
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
# For a library or package, you might want to ignore these files since the code is
|
||||||
|
# intended to run in multiple environments; otherwise, check them in:
|
||||||
|
# .python-version
|
||||||
|
|
||||||
|
# pipenv
|
||||||
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||||
|
# install all needed dependencies.
|
||||||
|
#Pipfile.lock
|
||||||
|
|
||||||
|
# poetry
|
||||||
|
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||||
|
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||||
|
# commonly ignored for libraries.
|
||||||
|
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||||
|
#poetry.lock
|
||||||
|
|
||||||
|
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||||
|
__pypackages__/
|
||||||
|
|
||||||
|
# Celery stuff
|
||||||
|
celerybeat-schedule
|
||||||
|
celerybeat.pid
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
!docker-compose/examples/*/env
|
||||||
|
!docker-compose/{{ cookiecutter.__project_slug }}/env
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
0
.idea/.gitignore
generated
vendored
Normal file
0
.idea/.gitignore
generated
vendored
Normal file
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
6
.idea/inspectionProfiles/profiles_settings.xml
generated
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
<component name="InspectionProjectProfileManager">
|
||||||
|
<settings>
|
||||||
|
<option name="USE_PROJECT_PROFILE" value="false" />
|
||||||
|
<version value="1.0" />
|
||||||
|
</settings>
|
||||||
|
</component>
|
8
.idea/modules.xml
generated
Normal file
8
.idea/modules.xml
generated
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="ProjectModuleManager">
|
||||||
|
<modules>
|
||||||
|
<module fileurl="file://$PROJECT_DIR$/.idea/py-cookiecutter-templates.iml" filepath="$PROJECT_DIR$/.idea/py-cookiecutter-templates.iml" />
|
||||||
|
</modules>
|
||||||
|
</component>
|
||||||
|
</project>
|
12
.idea/vcs.xml
generated
Normal file
12
.idea/vcs.xml
generated
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<project version="4">
|
||||||
|
<component name="CommitMessageInspectionProfile">
|
||||||
|
<profile version="1.0">
|
||||||
|
<inspection_tool class="CommitFormat" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||||
|
<inspection_tool class="CommitNamingConvention" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||||
|
</profile>
|
||||||
|
</component>
|
||||||
|
<component name="VcsDirectoryMappings">
|
||||||
|
<mapping directory="$PROJECT_DIR$" vcs="Git" />
|
||||||
|
</component>
|
||||||
|
</project>
|
207
README.md
207
README.md
@@ -2,24 +2,26 @@
|
|||||||
|
|
||||||
Project directory structure templates for the Python Cookiecutter package.
|
Project directory structure templates for the Python Cookiecutter package.
|
||||||
|
|
||||||
## What's Cookiecutter?
|
# What's Cookiecutter?
|
||||||
|
|
||||||
The Python package [Cookiecutter](https://github.com/cookiecutter/cookiecutter) assists in creating directory structure for whatever purpose you need such as for example a new Python project, an Ansible role or a `docker-compose` project - anything really that benefits from a uniform reproducible directory structure. If you've ever wanted to put project structure best practices into version control then Cookiecutter's here to help.
|
The Python package [Cookiecutter](https://github.com/cookiecutter/cookiecutter) assists in creating directory structure for whatever purpose you need such as for example a new Python project, an Ansible role or a `docker-compose` project - anything really that benefits from a uniform reproducible directory structure. If you've ever wanted to put project structure best practices into version control then Cookiecutter's here to help.
|
||||||
|
|
||||||
Cookiecutter is governed by so-called Cookiecutter templates, most of its magic inside Cookiecutter templates happens via the [Jinja2 template engine](https://palletsprojects.com/p/jinja/). You'll feel right at home if you're familiar with Ansible.
|
Cookiecutter is governed by so-called Cookiecutter templates, most of its magic inside Cookiecutter templates happens via the [Jinja2 template engine](https://palletsprojects.com/p/jinja/). You'll feel right at home if you're familiar with Ansible.
|
||||||
|
|
||||||
## Repo layout
|
# Repo layout
|
||||||
|
|
||||||
Each subdirectory in this repo is a Cookiecutter template, you'll recognize them from their telltale `cookiecutter.json` files. Directories usually also have a readme file explaining more about each individual template.
|
Each subdirectory in this repo is a Cookiecutter template, you'll recognize them from their telltale `cookiecutter.json` files. Directories usually also have a readme file explaining more about each individual template.
|
||||||
|
|
||||||
## Get started
|
# Get started
|
||||||
|
|
||||||
Get Cookiecutter like so:
|
Get Cookiecutter like so:
|
||||||
```
|
```
|
||||||
pip install cookiecutter
|
pip install cookiecutter
|
||||||
```
|
```
|
||||||
|
|
||||||
Execute a template like so, `docker-compose` as an example:
|
Unfamiliar with Python and `pip`? Check out [Developing](#developing) further down to get started with a virtual environment.
|
||||||
|
|
||||||
|
When all is set execute a template like so, `docker-compose` as an example:
|
||||||
```
|
```
|
||||||
cookiecutter https://quico.space/Quico/py-cookiecutter-templates.git --directory 'docker-compose'
|
cookiecutter https://quico.space/Quico/py-cookiecutter-templates.git --directory 'docker-compose'
|
||||||
```
|
```
|
||||||
@@ -31,7 +33,10 @@ This is Cookiecutter prompting for info:
|
|||||||
project_slug [dir-name]: grafana
|
project_slug [dir-name]: grafana
|
||||||
service [grafana]:
|
service [grafana]:
|
||||||
component_list [grafana]: grafana,nginx
|
component_list [grafana]: grafana,nginx
|
||||||
context [ctx]: cncf
|
Select build:
|
||||||
|
1 - no
|
||||||
|
2 - yes
|
||||||
|
Choose from 1, 2 [1]:
|
||||||
```
|
```
|
||||||
|
|
||||||
The end result is a directory structure that has everything you need to hit the ground running.
|
The end result is a directory structure that has everything you need to hit the ground running.
|
||||||
@@ -51,9 +56,191 @@ The end result is a directory structure that has everything you need to hit the
|
|||||||
│  ├── Dockerfile
|
│  ├── Dockerfile
|
||||||
│  └── extras
|
│  └── extras
|
||||||
│  └── .gitkeep
|
│  └── .gitkeep
|
||||||
├── common-settings.yml
|
├── common-settings.yaml
|
||||||
├── docker-compose.override.yml
|
├── compose.override.yaml
|
||||||
├── docker-compose.yml
|
├── compose.yaml
|
||||||
└── env
|
├── env
|
||||||
└── fully.qualified.domain.name.example
|
│  └── fqdn_context.env.example
|
||||||
|
└── README.md
|
||||||
```
|
```
|
||||||
|
|
||||||
|
# Developing
|
||||||
|
|
||||||
|
To change Cookiecutter templates get yourself an environment then make, test and commit your changes. First things first, the environment.
|
||||||
|
|
||||||
|
## Environment
|
||||||
|
|
||||||
|
Get yourself a Python virtual environment. In this example we're assuming you're running a Linux operating system and you'll be using [pyenv](https://github.com/pyenv/pyenv) to manage virtual environments.
|
||||||
|
|
||||||
|
### pyenv
|
||||||
|
|
||||||
|
- Install pyenv with what they are calling their automatic installer. Feel free to also read up on pyenv on their [GitHub project page](https://github.com/pyenv/pyenv).
|
||||||
|
```
|
||||||
|
curl https://pyenv.run | bash
|
||||||
|
```
|
||||||
|
|
||||||
|
- Following the installer's instruction add at least the following commands to your `~/.bashrc` file
|
||||||
|
```
|
||||||
|
export PYENV_ROOT="$HOME/.pyenv"
|
||||||
|
command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"
|
||||||
|
eval "$(pyenv init -)"
|
||||||
|
```
|
||||||
|
|
||||||
|
- You will also likely want to add this line which will make sure pyenv auto-activates venvs when you navigate into certain directories. More on that down at [venv](#venv).
|
||||||
|
```
|
||||||
|
eval "$(pyenv virtualenv-init -)"
|
||||||
|
```
|
||||||
|
|
||||||
|
- Also make sure `~/.bashrc` gets loaded for example by including this in a `~/.bash_profile` file
|
||||||
|
```
|
||||||
|
[[ -f ~/.bashrc ]] && . ~/.bashrc
|
||||||
|
```
|
||||||
|
|
||||||
|
- Reload `~/.bashrc`
|
||||||
|
```
|
||||||
|
source ~/.bashrc
|
||||||
|
```
|
||||||
|
|
||||||
|
### Python
|
||||||
|
|
||||||
|
- Update pyenv's package list
|
||||||
|
```
|
||||||
|
pyenv update
|
||||||
|
```
|
||||||
|
- Pick a Python version you like, for example copy-paste `3.11.4`:
|
||||||
|
```
|
||||||
|
pyenv install --list | less
|
||||||
|
|
||||||
|
...
|
||||||
|
3.10.12
|
||||||
|
3.11.0
|
||||||
|
3.11-dev
|
||||||
|
3.11.1
|
||||||
|
3.11.2
|
||||||
|
3.11.3
|
||||||
|
3.11.4
|
||||||
|
3.12.0b2
|
||||||
|
3.12-dev
|
||||||
|
3.13-dev
|
||||||
|
...
|
||||||
|
```
|
||||||
|
- Install Python, wait for compilation to finish on your machine
|
||||||
|
```
|
||||||
|
pyenv install 3.11.4
|
||||||
|
```
|
||||||
|
|
||||||
|
### Repo
|
||||||
|
|
||||||
|
- Clone this repo
|
||||||
|
```
|
||||||
|
git clone https://quico.space/Quico/py-cookiecutter-templates.git ~/py-cookiecutter-templates
|
||||||
|
```
|
||||||
|
|
||||||
|
### venv
|
||||||
|
|
||||||
|
- Create a virtual environment where `3.11.4` is the Python version you want to use in this venv and `cookiecutter-3.11.4` is the name of your venv. Adding or dropping the Python version from your venv name comes down to personal preference.
|
||||||
|
```
|
||||||
|
pyenv virtualenv 3.11.4 cookiecutter-3.11.4
|
||||||
|
```
|
||||||
|
|
||||||
|
- In your repo path `~/py-cookiecutter-templates` create a `.python-version` file to tell pyenv to always activate your desired venv when inside this dir.
|
||||||
|
```
|
||||||
|
cd ~/py-cookiecutter-templates
|
||||||
|
pyenv local cookiecutter-3.11.4
|
||||||
|
```
|
||||||
|
pyenv will immediately prefix your shell's `${PS1}` prompt with the venv name.
|
||||||
|
```
|
||||||
|
(cookiecutter-3.11.4) [✔ 23:19 user@machine py-cookiecutter-templates]$
|
||||||
|
```
|
||||||
|
It will deactivate the venv and drop its prefix as soon as you navigate out of this dir.
|
||||||
|
```
|
||||||
|
(cookiecutter-3.11.4) [✔ 23:19 user@machine py-cookiecutter-templates]$ cd
|
||||||
|
[✔ 23:19 user@machine ~]$
|
||||||
|
```
|
||||||
|
For now though stay in `~/py-cookiecutter-templates`, you're going to want to pip-install [cookiecutter](https://pypi.org/project/cookiecutter).
|
||||||
|
|
||||||
|
- Upgrade `pip`
|
||||||
|
```
|
||||||
|
pip install --upgrade pip
|
||||||
|
```
|
||||||
|
|
||||||
|
- Install [cookiecutter](https://pypi.org/project/cookiecutter)
|
||||||
|
```
|
||||||
|
pip install cookiecutter
|
||||||
|
```
|
||||||
|
|
||||||
|
All done, your environment is set up.
|
||||||
|
|
||||||
|
## Change
|
||||||
|
|
||||||
|
Make some code changes, for example to the Docker Compose Cookiecutter template. When you're happy run your local Cookiecutter template to see how your changes are rendering.
|
||||||
|
|
||||||
|
- Create `/tmp/cookiecutter-docker-compose`
|
||||||
|
```
|
||||||
|
mkdir '/tmp/cookiecutter-docker-compose'
|
||||||
|
```
|
||||||
|
|
||||||
|
- Render a Docker Compose directory into your output directory, answer Cookiecutter's prompts:
|
||||||
|
```
|
||||||
|
# cookiecutter ~/py-cookiecutter-templates \
|
||||||
|
--directory docker-compose \
|
||||||
|
--output-dir /tmp/cookiecutter-docker-compose
|
||||||
|
|
||||||
|
project_slug [dir-name]: mydir
|
||||||
|
service [mydir]: myservice
|
||||||
|
component_list [myservice]: mycomponent_one,mycomponent_two
|
||||||
|
Select build:
|
||||||
|
1 - no
|
||||||
|
2 - yes
|
||||||
|
Choose from 1, 2 [1]: 2
|
||||||
|
```
|
||||||
|
|
||||||
|
- Observe that in `/tmp/cookiecutter-docker-compose` you now have your rendered Docker Compose dir:
|
||||||
|
```
|
||||||
|
# tree -a .
|
||||||
|
|
||||||
|
.
|
||||||
|
└── mydir
|
||||||
|
├── build-context
|
||||||
|
│  ├── mycomponent_one
|
||||||
|
│  │  ├── docker-data
|
||||||
|
│  │  │  └── .gitkeep
|
||||||
|
│  │  ├── Dockerfile
|
||||||
|
│  │  └── extras
|
||||||
|
│  │  └── .gitkeep
|
||||||
|
│  └── mycomponent_two
|
||||||
|
│  ├── docker-data
|
||||||
|
│  │  └── .gitkeep
|
||||||
|
│  ├── Dockerfile
|
||||||
|
│  └── extras
|
||||||
|
│  └── .gitkeep
|
||||||
|
├── common-settings.yaml
|
||||||
|
├── compose.override.yaml
|
||||||
|
├── compose.yaml
|
||||||
|
├── env
|
||||||
|
│  └── fqdn_context.env.example
|
||||||
|
└── README.md
|
||||||
|
```
|
||||||
|
|
||||||
|
- For rapid testing you will most likely want to not type prompt answers repeatedly. Give them as command line arguments instead, also specify `--no-input` to suppress prompts:
|
||||||
|
```
|
||||||
|
cookiecutter ~/py-cookiecutter-templates \
|
||||||
|
--no-input \
|
||||||
|
--directory docker-compose \
|
||||||
|
--output-dir /tmp/cookiecutter-docker-compose \
|
||||||
|
project_slug=mydir \
|
||||||
|
service=myservice \
|
||||||
|
component_list=mycomponent_one,mycomponent_two \
|
||||||
|
build=yes
|
||||||
|
```
|
||||||
|
|
||||||
|
## Commit prep
|
||||||
|
|
||||||
|
When you're about ready to commit changes into this repo check the following bullet points.
|
||||||
|
|
||||||
|
- Did you update the [Cookiecutter template README.md file](docker-compose/README.md), here for example for Docker Compose?
|
||||||
|
- Change in behavior?
|
||||||
|
- An updated example directory layout?
|
||||||
|
- Updated example file content?
|
||||||
|
- Did you commit a new completely rendered example directory structure into [docker-compose/examples](docker-compose/examples) dir?
|
||||||
|
- Did you change something that affects existing example directories? If so rerender them.
|
||||||
|
@@ -12,7 +12,10 @@ Cookiecutter interactively prompts you for the following info, here with example
|
|||||||
project_slug [dir-name]: grafana
|
project_slug [dir-name]: grafana
|
||||||
service [grafana]:
|
service [grafana]:
|
||||||
component_list [grafana]: grafana,nginx
|
component_list [grafana]: grafana,nginx
|
||||||
context [ctx]: cncf
|
Select build:
|
||||||
|
1 - no
|
||||||
|
2 - yes
|
||||||
|
Choose from 1, 2 [1]:
|
||||||
```
|
```
|
||||||
|
|
||||||
Done, directory structure and files for your next `docker-compose` project are ready for you to hit the ground running.
|
Done, directory structure and files for your next `docker-compose` project are ready for you to hit the ground running.
|
||||||
@@ -24,14 +27,14 @@ Your four answers translate as follows into rendered files.
|
|||||||
1. The `project_slug` is used only as directory name. A container named `vault` may be fine but the project directory name `hashicorpvault` might be more descriptive.
|
1. The `project_slug` is used only as directory name. A container named `vault` may be fine but the project directory name `hashicorpvault` might be more descriptive.
|
||||||
```
|
```
|
||||||
.
|
.
|
||||||
└── hashicorpvault <--- Here
|
└── hashicorpvault <--- Here
|
||||||
├── build-context
|
├── build-context
|
||||||
│  ├── docker-data
|
│  ├── docker-data
|
||||||
│  │  └── .gitkeep
|
│  │  └── .gitkeep
|
||||||
│  ├── Dockerfile
|
│  ├── Dockerfile
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
2. The `service` variable by default copies your `project_slug` answer. It's a style decision whether you leave it at that and just hit `Enter`. The service name will come up in rendered `docker-compose.yml` at purely cosmetic locations such as the `networks:` key, `container_name:` and `/opt/docker-data` volume mount presets, here with `ftp` as the service name:
|
2. The `service` variable by default copies your `project_slug` answer. It's a style decision whether you leave it at that and just hit `Enter`. The service name will come up in rendered `compose.yaml` at purely cosmetic locations such as the `networks:` key, `container_name:` and `/opt/docker-data` volume mount presets, here with `ftp` as the service name:
|
||||||
```
|
```
|
||||||
services:
|
services:
|
||||||
mysql:
|
mysql:
|
||||||
@@ -50,30 +53,21 @@ Your four answers translate as follows into rendered files.
|
|||||||
3. Treat `component_list` as the list of Docker images that make up your service. Each `docker-compose` project forms a *__service__* - see above - that consists of either a single or multiple *__components__*. They're your `services:`, your container, volume, variable names etc.:
|
3. Treat `component_list` as the list of Docker images that make up your service. Each `docker-compose` project forms a *__service__* - see above - that consists of either a single or multiple *__components__*. They're your `services:`, your container, volume, variable names etc.:
|
||||||
```
|
```
|
||||||
services:
|
services:
|
||||||
grafana: <---
|
grafana: <---
|
||||||
image: "grafana:${GRAFANA_VERSION}" <---
|
image: "grafana:${GRAFANA_VERSION}" <---
|
||||||
container_name: "grafana-grafana-${CONTEXT}" <---
|
container_name: "grafana-grafana-${CONTEXT}" <---
|
||||||
...
|
...
|
||||||
environment:
|
environment:
|
||||||
# GRAFANA_USER: ${GRAFANA_USER} <---
|
# GRAFANA_USER: ${GRAFANA_USER} <---
|
||||||
# GRAFANA_PASSWORD: ${GRAFANA_PASSWORD} <---
|
# GRAFANA_PASSWORD: ${GRAFANA_PASSWORD} <---
|
||||||
...
|
...
|
||||||
```
|
```
|
||||||
|
|
||||||
4. The last prompt for a `context` is a generic string to help you distinguish deployments. It can be whatever you want such as for example a team name, here `cncf` which shows up as a preset in an example env file. It defaults to `ctx` just it can't be empty.
|
4. Prompt `build` is a yes-no question. Cookiecutter will create a `README.md` file with copy-pastable Docker Compose commands pre-filled. If you answer `yes` to this prompt `README.md` will contain an example paragraph that explains the build process along the lines of:
|
||||||
```
|
```
|
||||||
.
|
docker compose ... --profile 'build' build
|
||||||
└── grafana
|
|
||||||
...
|
|
||||||
└── env
|
|
||||||
└── fully.qualified.domain.name.example <---
|
|
||||||
```
|
|
||||||
|
|
||||||
Which then looks like:
|
|
||||||
```
|
|
||||||
CONTEXT=cncf
|
|
||||||
...
|
|
||||||
```
|
```
|
||||||
|
Whereas by answering `no` (or just hitting `<Enter>` to accept the default of `no`) no such paragraph will be added to the example Markdown file. Build instructions are only really needed if you need to locally build a derivative image.
|
||||||
|
|
||||||
Also check out [the Caveats section](#caveats) at the end to learn what this template does not do well.
|
Also check out [the Caveats section](#caveats) at the end to learn what this template does not do well.
|
||||||
|
|
||||||
@@ -98,11 +92,12 @@ Above example of a multi-component (two in this case) `grafana` service will giv
|
|||||||
│  ├── Dockerfile
|
│  ├── Dockerfile
|
||||||
│  └── extras
|
│  └── extras
|
||||||
│  └── .gitkeep
|
│  └── .gitkeep
|
||||||
├── common-settings.yml
|
├── common-settings.yaml
|
||||||
├── docker-compose.override.yml
|
├── compose.override.yaml
|
||||||
├── docker-compose.yml
|
├── compose.yaml
|
||||||
└── env
|
├── env
|
||||||
└── fully.qualified.domain.name.example
|
│  └── fqdn_context.env.example
|
||||||
|
└── README.md
|
||||||
```
|
```
|
||||||
Check out file contents over in the [examples/grafana](examples/grafana) subdir.
|
Check out file contents over in the [examples/grafana](examples/grafana) subdir.
|
||||||
|
|
||||||
@@ -118,21 +113,22 @@ With an alternative single-component `hashicorpvault` service the result may loo
|
|||||||
│  ├── Dockerfile
|
│  ├── Dockerfile
|
||||||
│  └── extras
|
│  └── extras
|
||||||
│  └── .gitkeep
|
│  └── .gitkeep
|
||||||
├── common-settings.yml
|
├── common-settings.yaml
|
||||||
├── docker-compose.override.yml
|
├── compose.override.yaml
|
||||||
├── docker-compose.yml
|
├── compose.yaml
|
||||||
└── env
|
├── env
|
||||||
└── fully.qualified.domain.name.example
|
│  └── fqdn_context.env.example
|
||||||
|
└── README.md
|
||||||
```
|
```
|
||||||
Check out file contents over in the [examples/hashicorpvault](examples/hashicorpvault) subdir.
|
Check out file contents over in the [examples/hashicorpvault](examples/hashicorpvault) subdir.
|
||||||
|
|
||||||
## Caveats
|
## Caveats
|
||||||
|
|
||||||
Consider Cookiecutter's project directory and rendered files a starting point. It won't do everything perfect.
|
Consider Cookiecutter's project directory and rendered files a starting point. It won't do everything perfectly.
|
||||||
|
|
||||||
Imagine if you will a service that consists of [Infinispan](https://infinispan.org/) among other things. In Docker Hub's content-addressable image store Infinispan's location is at `infinispan/server` so you obviously want that exact string with a forward slash to show up in your `docker-compose.yml` as the `image:` key's value, same with your `Dockerfile`. The `image:` key's value comes from what you enter in Cookiecutter's `component_list` prompt. Component strings are then used to also pre-fill the `volumes:` key.
|
Imagine if you will a service that consists of [Infinispan](https://infinispan.org/) among other things. In Docker Hub's content-addressable image store Infinispan's location is at `infinispan/server` so you obviously want that exact string with a forward slash to show up in your `compose.yaml` as the `image:` key's value, same with your `Dockerfile`. The `image:` key's value comes from what you enter in Cookiecutter's `component_list` prompt. Component strings are then used to also pre-fill the `volumes:` key.
|
||||||
|
|
||||||
This will cause obvious issues (but the `image:` key is kinda correct):
|
_**This**_ will cause obvious issues (but the `image:` key is kinda correct):
|
||||||
```
|
```
|
||||||
services:
|
services:
|
||||||
infinispan/server:
|
infinispan/server:
|
||||||
@@ -140,7 +136,7 @@ services:
|
|||||||
container_name: "cacheman-infinispan/server-${CONTEXT}"
|
container_name: "cacheman-infinispan/server-${CONTEXT}"
|
||||||
```
|
```
|
||||||
|
|
||||||
This won't cause issues (but you'll have to then go in and manually change the `image:` key to use `infinispan/server`):
|
_**This**_ won't cause issues (but you'll have to then go in and manually change the `image:` key to use `infinispan/server`):
|
||||||
```
|
```
|
||||||
services:
|
services:
|
||||||
infinispan:
|
infinispan:
|
||||||
@@ -148,4 +144,4 @@ services:
|
|||||||
container_name: "cacheman-infinispan-${CONTEXT}"
|
container_name: "cacheman-infinispan-${CONTEXT}"
|
||||||
```
|
```
|
||||||
|
|
||||||
You're going to want to keep it simply and go with option 2.
|
You're going to want to keep it simple and go with option 2.
|
||||||
|
@@ -5,6 +5,5 @@
|
|||||||
"__service_slug": "{{ cookiecutter.service.lower().replace(' ', '_').replace('-', '_') }}",
|
"__service_slug": "{{ cookiecutter.service.lower().replace(' ', '_').replace('-', '_') }}",
|
||||||
"component_list": "{{ cookiecutter.__service_slug }}",
|
"component_list": "{{ cookiecutter.__service_slug }}",
|
||||||
"__component_list_slug": "{{ cookiecutter.component_list.lower().replace(' ', '_').replace('-', '_') }}",
|
"__component_list_slug": "{{ cookiecutter.component_list.lower().replace(' ', '_').replace('-', '_') }}",
|
||||||
"context": "ctx",
|
"build": ["yes", "no"]
|
||||||
"__context_slug": "{{ cookiecutter.context.lower().replace(' ', '_').replace('-', '_') }}"
|
|
||||||
}
|
}
|
||||||
|
142
docker-compose/examples/grafana/README.md
Normal file
142
docker-compose/examples/grafana/README.md
Normal file
@@ -0,0 +1,142 @@
|
|||||||
|
# FIXME
|
||||||
|
|
||||||
|
Search and replace all mentions of FIXME with sensible content in this file and in [compose.yaml](compose.yaml).
|
||||||
|
|
||||||
|
# Grafana Docker Compose files
|
||||||
|
|
||||||
|
Docker Compose files to spin up an instance of Grafana FIXME capitalization FIXME.
|
||||||
|
|
||||||
|
# How to run
|
||||||
|
|
||||||
|
Add a `COMPOSE_ENV` file and save its location as a shell variable along with the location where this repo lives, here for example `/opt/containers/grafana` plus all other variables. At [env/fqdn_context.env.example](env/fqdn_context.env.example) you'll find an example environment file.
|
||||||
|
|
||||||
|
When everything's ready start Grafana with Docker Compose, otherwise head down to [Initial setup](#initial-setup) first.
|
||||||
|
|
||||||
|
## Environment
|
||||||
|
|
||||||
|
```
|
||||||
|
export COMPOSE_DIR='/opt/containers/grafana'
|
||||||
|
export COMPOSE_CTX='ux_vilnius'
|
||||||
|
export COMPOSE_PROJECT='grafana-'"${COMPOSE_CTX}"
|
||||||
|
export COMPOSE_FILE="${COMPOSE_DIR}"'/compose.yaml'
|
||||||
|
export COMPOSE_ENV=<add accordingly>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
On your deployment machine create the necessary Docker context to connect to and control the Docker daemon on whatever target host you'll be using, for example:
|
||||||
|
```
|
||||||
|
docker context create fully.qualified.domain.name --docker 'host=ssh://root@fully.qualified.domain.name'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Pull
|
||||||
|
|
||||||
|
Pull images from Docker Hub verbatim.
|
||||||
|
|
||||||
|
```
|
||||||
|
docker compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --env-file "${COMPOSE_ENV}" --profile 'full' pull
|
||||||
|
```
|
||||||
|
|
||||||
|
## Copy to target
|
||||||
|
|
||||||
|
Copy images to target Docker host, that is assuming you deploy to a machine that itself has no network route to reach Docker Hub or your private registry of choice. Copying in its simplest form involves a local `docker save` and a remote `docker load`. Consider the helper mini-project [quico.space/Quico/copy-docker](https://quico.space/Quico/copy-docker) where [copy-docker.sh](https://quico.space/Quico/copy-docker/src/branch/main/copy-docker.sh) allows the following workflow:
|
||||||
|
|
||||||
|
```
|
||||||
|
source "${COMPOSE_ENV}"
|
||||||
|
# FIXME Docker Hub image name with or without slash? FIXME
|
||||||
|
for image in 'grafana:'"${GRAFANA_VERSION}" 'nginx:'"${NGINX_VERSION}"; do
|
||||||
|
copy-docker "${image}" fully.qualified.domain.name
|
||||||
|
done
|
||||||
|
```
|
||||||
|
|
||||||
|
## Start
|
||||||
|
|
||||||
|
FIXME Does the service use a virtual IP address? FIXME
|
||||||
|
|
||||||
|
Make sure your service's virtual IP address is bound on your target host then start containers.
|
||||||
|
|
||||||
|
```
|
||||||
|
docker --context 'fully.qualified.domain.name' compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --env-file "${COMPOSE_ENV}" --profile 'full' up --detach
|
||||||
|
```
|
||||||
|
|
||||||
|
# Initial setup
|
||||||
|
|
||||||
|
We're assuming you run Docker Compose workloads with ZFS-based bind mounts. ZFS management, creating a zpool and setting adequate properties for its datasets is out of scope of this document.
|
||||||
|
|
||||||
|
## Datasets
|
||||||
|
|
||||||
|
Create ZFS datasets and set permissions as needed.
|
||||||
|
|
||||||
|
* Parent dataset
|
||||||
|
```
|
||||||
|
zfs create -o mountpoint=/opt/docker-data 'zpool/docker-data'
|
||||||
|
```
|
||||||
|
|
||||||
|
* Container-specific datasets
|
||||||
|
```
|
||||||
|
zfs create -p 'zpool/docker-data/grafana-'"${COMPOSE_CTX}"'/grafana/data/db'
|
||||||
|
zfs create -p 'zpool/docker-data/grafana-'"${COMPOSE_CTX}"'/grafana/data/logs'
|
||||||
|
zfs create -p 'zpool/docker-data/grafana-'"${COMPOSE_CTX}"'/grafana/config'
|
||||||
|
zfs create -p 'zpool/docker-data/grafana-'"${COMPOSE_CTX}"'/nginx/data/db'
|
||||||
|
zfs create -p 'zpool/docker-data/grafana-'"${COMPOSE_CTX}"'/nginx/data/logs'
|
||||||
|
zfs create -p 'zpool/docker-data/grafana-'"${COMPOSE_CTX}"'/nginx/config'
|
||||||
|
```
|
||||||
|
FIXME When changing bind mount locations to real ones remember to also update `volumes:` in [compose.yaml](compose.yaml) FIXME
|
||||||
|
|
||||||
|
* Create subdirs
|
||||||
|
```
|
||||||
|
mkdir -p '/opt/docker-data/grafana-'"${COMPOSE_CTX}"'/grafana/'{'.ssh','config','data','projects'}
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change ownership
|
||||||
|
```
|
||||||
|
chown -R 1000:1000 '/opt/docker-data/grafana-'"${COMPOSE_CTX}"'/grafana/data/'*
|
||||||
|
```
|
||||||
|
|
||||||
|
## Additional files
|
||||||
|
|
||||||
|
Place the following files on target server. Use the directory structure at [build-context](build-context) as a guide, specifically at `docker-data`.
|
||||||
|
|
||||||
|
FIXME Add details about files that aren't self-explanatory FIXME
|
||||||
|
|
||||||
|
```
|
||||||
|
build-context/
|
||||||
|
├── grafana
|
||||||
|
│ ├── docker-data
|
||||||
|
│ | └── config
|
||||||
|
│ │ └── grafana.cfg
|
||||||
|
│ ├── ...
|
||||||
|
│ └── ...
|
||||||
|
└── nginx
|
||||||
|
├── docker-data
|
||||||
|
| └── config
|
||||||
|
│ └── nginx.cfg
|
||||||
|
├── ...
|
||||||
|
└── ...
|
||||||
|
```
|
||||||
|
|
||||||
|
When done head back up to [How to run](#how-to-run).
|
||||||
|
|
||||||
|
# Development
|
||||||
|
|
||||||
|
## Conventional commits
|
||||||
|
|
||||||
|
This project uses [Conventional Commits](https://www.conventionalcommits.org/) for its commit messages.
|
||||||
|
|
||||||
|
### Commit types
|
||||||
|
|
||||||
|
Commit _types_ besides `fix` and `feat` are:
|
||||||
|
|
||||||
|
- `refactor`: Keeping functionality while streamlining or otherwise improving function flow
|
||||||
|
- `docs`: Documentation for project or components
|
||||||
|
|
||||||
|
### Commit scopes
|
||||||
|
|
||||||
|
The following _scopes_ are known for this project. A Conventional Commits commit message may optionally use one of the following scopes or none:
|
||||||
|
|
||||||
|
- `grafana`: A change to how the `grafana` service component works
|
||||||
|
- `nginx`: A change to how the `nginx` service component works
|
||||||
|
- `build`: Build-related changes such as `Dockerfile` fixes and features.
|
||||||
|
- `mount`: Volume or bind mount-related changes.
|
||||||
|
- `net`: Networking, IP addressing, routing changes
|
||||||
|
- `meta`: Affects the project's repo layout, file names etc.
|
@@ -1,6 +1,6 @@
|
|||||||
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
||||||
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
||||||
# file at 'env/fully.qualified.domain.name.example' for reference.
|
# file at 'env/fqdn_context.env.example' for reference.
|
||||||
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
||||||
|
|
||||||
# Another env var, this one's needed in the example build step below:
|
# Another env var, this one's needed in the example build step below:
|
||||||
|
@@ -1,6 +1,6 @@
|
|||||||
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
||||||
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
||||||
# file at 'env/fully.qualified.domain.name.example' for reference.
|
# file at 'env/fqdn_context.env.example' for reference.
|
||||||
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
||||||
|
|
||||||
# Another env var, this one's needed in the example build step below:
|
# Another env var, this one's needed in the example build step below:
|
||||||
|
@@ -1,5 +1,6 @@
|
|||||||
services:
|
services:
|
||||||
grafana-build:
|
grafana-build:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
image: "grafana:${GRAFANA_VERSION}"
|
image: "grafana:${GRAFANA_VERSION}"
|
||||||
profiles: ["build", "build-grafana"]
|
profiles: ["build", "build-grafana"]
|
||||||
build:
|
build:
|
||||||
@@ -9,6 +10,7 @@ services:
|
|||||||
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
||||||
GRAFANA_VERSION: "${GRAFANA_VERSION}"
|
GRAFANA_VERSION: "${GRAFANA_VERSION}"
|
||||||
nginx-build:
|
nginx-build:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
image: "nginx:${NGINX_VERSION}"
|
image: "nginx:${NGINX_VERSION}"
|
||||||
profiles: ["build", "build-nginx"]
|
profiles: ["build", "build-nginx"]
|
||||||
build:
|
build:
|
72
docker-compose/examples/grafana/compose.yaml
Normal file
72
docker-compose/examples/grafana/compose.yaml
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
services:
|
||||||
|
grafana:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "grafana:${GRAFANA_VERSION}"
|
||||||
|
container_name: "grafana-grafana-${CONTEXT}"
|
||||||
|
networks:
|
||||||
|
grafana-default:
|
||||||
|
profiles: ["full", "grafana"]
|
||||||
|
depends_on:
|
||||||
|
nginx:
|
||||||
|
condition: service_healthy
|
||||||
|
ulimits:
|
||||||
|
nproc: ${ULIMIT_NPROC:-65535}
|
||||||
|
nofile:
|
||||||
|
soft: ${ULIMIT_NPROC:-65535}
|
||||||
|
hard: ${ULIMIT_NPROC:-65535}
|
||||||
|
extends:
|
||||||
|
file: common-settings.yaml
|
||||||
|
service: common-settings
|
||||||
|
ports:
|
||||||
|
# - "8080:80"
|
||||||
|
volumes:
|
||||||
|
# When changing bind mount locations to real ones remember to
|
||||||
|
# also update "Initial setup" section in README.md.
|
||||||
|
# - /opt/docker-data/grafana-${CONTEXT}/grafana/data/db:/usr/lib/grafana
|
||||||
|
# - /opt/docker-data/grafana-${CONTEXT}/grafana/data/logs:/var/log/grafana
|
||||||
|
# - /opt/docker-data/grafana-${CONTEXT}/grafana/config:/etc/grafana
|
||||||
|
environment:
|
||||||
|
# GRAFANA_USER: ${GRAFANA_USER}
|
||||||
|
# GRAFANA_PASSWORD: ${GRAFANA_PASSWORD}
|
||||||
|
nginx:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "nginx:${NGINX_VERSION}"
|
||||||
|
container_name: "grafana-nginx-${CONTEXT}"
|
||||||
|
networks:
|
||||||
|
grafana-default:
|
||||||
|
profiles: ["full", "nginx"]
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "fping", "--count=1", "${GRAFANA_VIP}", "--period=500", "--quiet"]
|
||||||
|
interval: 3s
|
||||||
|
timeout: 1s
|
||||||
|
retries: 60
|
||||||
|
start_period: 2s
|
||||||
|
ulimits:
|
||||||
|
nproc: ${ULIMIT_NPROC:-65535}
|
||||||
|
nofile:
|
||||||
|
soft: ${ULIMIT_NPROC:-65535}
|
||||||
|
hard: ${ULIMIT_NPROC:-65535}
|
||||||
|
extends:
|
||||||
|
file: common-settings.yaml
|
||||||
|
service: common-settings
|
||||||
|
ports:
|
||||||
|
# - "8080:80"
|
||||||
|
volumes:
|
||||||
|
# When changing bind mount locations to real ones remember to
|
||||||
|
# also update "Initial setup" section in README.md.
|
||||||
|
# - /opt/docker-data/grafana-${CONTEXT}/nginx/data/db:/usr/lib/nginx
|
||||||
|
# - /opt/docker-data/grafana-${CONTEXT}/nginx/data/logs:/var/log/nginx
|
||||||
|
# - /opt/docker-data/grafana-${CONTEXT}/nginx/config:/etc/nginx
|
||||||
|
environment:
|
||||||
|
# NGINX_USER: ${NGINX_USER}
|
||||||
|
# NGINX_PASSWORD: ${NGINX_PASSWORD}
|
||||||
|
networks:
|
||||||
|
grafana-default:
|
||||||
|
name: grafana-${CONTEXT}
|
||||||
|
driver: bridge
|
||||||
|
driver_opts:
|
||||||
|
com.docker.network.enable_ipv6: "false"
|
||||||
|
ipam:
|
||||||
|
driver: default
|
||||||
|
config:
|
||||||
|
- subnet: ${SUBNET}
|
@@ -1,47 +0,0 @@
|
|||||||
services:
|
|
||||||
grafana:
|
|
||||||
image: "grafana:${GRAFANA_VERSION}"
|
|
||||||
container_name: "grafana-grafana-${CONTEXT}"
|
|
||||||
networks:
|
|
||||||
grafana-default:
|
|
||||||
profiles: ["full", "grafana"]
|
|
||||||
extends:
|
|
||||||
file: common-settings.yml
|
|
||||||
service: common-settings
|
|
||||||
ports:
|
|
||||||
# - "8080:80"
|
|
||||||
volumes:
|
|
||||||
# - /opt/docker-data/grafana-grafana-${CONTEXT}/grafana/data/db:/usr/lib/grafana
|
|
||||||
# - /opt/docker-data/grafana-grafana-${CONTEXT}/grafana/data/logs:/var/log/grafana
|
|
||||||
# - /opt/docker-data/grafana-grafana-${CONTEXT}/grafana/config:/etc/grafana
|
|
||||||
environment:
|
|
||||||
# GRAFANA_USER: ${GRAFANA_USER}
|
|
||||||
# GRAFANA_PASSWORD: ${GRAFANA_PASSWORD}
|
|
||||||
nginx:
|
|
||||||
image: "nginx:${NGINX_VERSION}"
|
|
||||||
container_name: "grafana-nginx-${CONTEXT}"
|
|
||||||
networks:
|
|
||||||
grafana-default:
|
|
||||||
profiles: ["full", "nginx"]
|
|
||||||
extends:
|
|
||||||
file: common-settings.yml
|
|
||||||
service: common-settings
|
|
||||||
ports:
|
|
||||||
# - "8080:80"
|
|
||||||
volumes:
|
|
||||||
# - /opt/docker-data/grafana-nginx-${CONTEXT}/nginx/data/db:/usr/lib/nginx
|
|
||||||
# - /opt/docker-data/grafana-nginx-${CONTEXT}/nginx/data/logs:/var/log/nginx
|
|
||||||
# - /opt/docker-data/grafana-nginx-${CONTEXT}/nginx/config:/etc/nginx
|
|
||||||
environment:
|
|
||||||
# NGINX_USER: ${NGINX_USER}
|
|
||||||
# NGINX_PASSWORD: ${NGINX_PASSWORD}
|
|
||||||
networks:
|
|
||||||
grafana-default:
|
|
||||||
name: grafana-${CONTEXT}
|
|
||||||
driver: bridge
|
|
||||||
driver_opts:
|
|
||||||
com.docker.network.enable_ipv6: "false"
|
|
||||||
ipam:
|
|
||||||
driver: default
|
|
||||||
config:
|
|
||||||
# - subnet: 172.21.184.0/24
|
|
34
docker-compose/examples/grafana/env/fqdn_context.env.example
vendored
Normal file
34
docker-compose/examples/grafana/env/fqdn_context.env.example
vendored
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
CONTEXT=ux_vilnius
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Set something sensible here and uncomment
|
||||||
|
# ---
|
||||||
|
# GRAFANA_VERSION=x.y.z
|
||||||
|
# NGINX_VERSION=x.y.z
|
||||||
|
# GRAFANA_VIP=10.1.1.2
|
||||||
|
# GRAFANA_BUILD_DATE=20230731
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Feel free to leave defaults. They apply while these vars are commented out
|
||||||
|
# ---
|
||||||
|
# RESTARTPOLICY=unless-stopped
|
||||||
|
# TIMEZONE=Etc/UTC
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Subnet to use for this Docker Compose project. Docker defaults to
|
||||||
|
# container networks in prefix 172.16.0.0/12 which is 1 million addresses in
|
||||||
|
# the range from 172.16.0.0 to 172.31.255.255. Docker uses 172.17.0.0/16 for
|
||||||
|
# itself. Use any sensible prefix in 172.16.0.0/12 here except for Docker's
|
||||||
|
# own 172.17.0.0/16.
|
||||||
|
# ---
|
||||||
|
SUBNET=172.30.95.0/24
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# See 'compose.override.yaml' for how to make a variable available in
|
||||||
|
# a Dockerfile
|
||||||
|
# ---
|
||||||
|
# EXAMPLE_ARG_FROM_ENV_FILE=must_be_available_in_dockerfile
|
@@ -1,30 +0,0 @@
|
|||||||
CONTEXT=cncf
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Set something sensible here and uncomment
|
|
||||||
# ---
|
|
||||||
# GRAFANA_VERSION=x.y.z
|
|
||||||
# NGINX_VERSION=x.y.z
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# A ${LOCATION} var is usually not needed. It may be helpful when a ${CONTEXT}
|
|
||||||
# extends over more than one location e.g. to bind-mount location-specific
|
|
||||||
# config files or certificates into a container.
|
|
||||||
# ---
|
|
||||||
# LOCATION=
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Feel free to leave defaults. They apply while these vars are commented out
|
|
||||||
# ---
|
|
||||||
# RESTARTPOLICY=unless-stopped
|
|
||||||
# TIMEZONE=Etc/UTC
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# See 'docker-compose.override.yml' for how to make a variable available in
|
|
||||||
# a Dockerfile
|
|
||||||
# ---
|
|
||||||
# EXAMPLE_ARG_FROM_ENV_FILE=must_be_available_in_dockerfile
|
|
149
docker-compose/examples/hashicorpvault/README.md
Normal file
149
docker-compose/examples/hashicorpvault/README.md
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
# FIXME
|
||||||
|
|
||||||
|
Search and replace all mentions of FIXME with sensible content in this file and in [compose.yaml](compose.yaml).
|
||||||
|
|
||||||
|
# Vault Docker Compose files
|
||||||
|
|
||||||
|
Docker Compose files to spin up an instance of Vault FIXME capitalization FIXME.
|
||||||
|
|
||||||
|
# How to run
|
||||||
|
|
||||||
|
Add a `COMPOSE_ENV` file and save its location as a shell variable along with the location where this repo lives, here for example `/opt/containers/hashicorpvault` plus all other variables. At [env/fqdn_context.env.example](env/fqdn_context.env.example) you'll find an example environment file.
|
||||||
|
|
||||||
|
When everything's ready start Vault with Docker Compose, otherwise head down to [Initial setup](#initial-setup) first.
|
||||||
|
|
||||||
|
## Environment
|
||||||
|
|
||||||
|
```
|
||||||
|
export COMPOSE_DIR='/opt/containers/hashicorpvault'
|
||||||
|
export COMPOSE_CTX='ux_vilnius'
|
||||||
|
export COMPOSE_PROJECT='vault-'"${COMPOSE_CTX}"
|
||||||
|
export COMPOSE_FILE="${COMPOSE_DIR}"'/compose.yaml'
|
||||||
|
export COMPOSE_OVERRIDE="${COMPOSE_DIR%/}"'/compose.override.yaml'
|
||||||
|
export COMPOSE_ENV=<add accordingly>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
On your deployment machine create the necessary Docker context to connect to and control the Docker daemon on whatever target host you'll be using, for example:
|
||||||
|
```
|
||||||
|
docker context create fully.qualified.domain.name --docker 'host=ssh://root@fully.qualified.domain.name'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Build
|
||||||
|
|
||||||
|
> Skip to [Pull](#pull) if you already have images in your private registry ready to use. Otherwise read on to build them now.
|
||||||
|
|
||||||
|
FIXME We build the `vault` image locally. Our adjustment to the official image is simply adding `/tmp/vault` to it. See [build-context/Dockerfile](build-context/Dockerfile). We use `/tmp/vault` to bind-mount a dedicated ZFS dataset for the application's `tmpdir` location.
|
||||||
|
|
||||||
|
```
|
||||||
|
docker compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --file "${COMPOSE_OVERRIDE}" --env-file "${COMPOSE_ENV}" --profile 'build' build
|
||||||
|
```
|
||||||
|
|
||||||
|
## Push
|
||||||
|
|
||||||
|
Push to Docker Hub or your private registry. Setting up a private registry is out of scope of this repo. Once you have a registry available you can use it like so:
|
||||||
|
- On your OS install a Docker credential helper per [github.com/docker/docker-credential-helpers](https://github.com/docker/docker-credential-helpers). This will make sure you won't store credentials hashed (and unencrypted) in `~/.docker/config.json`. On an example Arch Linux machine where D-Bus Secret Service exists this will come via something like the [docker-credential-secretservice-bin](https://aur.archlinux.org/packages/docker-credential-secretservice-bin) Arch User Repository package. Just install and you're done.
|
||||||
|
- Do a `docker login registry.example.com`, enter username and password, confirm login.
|
||||||
|
|
||||||
|
```
|
||||||
|
source "${COMPOSE_ENV}"
|
||||||
|
docker push "registry.example.com/project/vault:${VAULT_BUILD_DATE}-${VAULT_VERSION}"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Pull
|
||||||
|
|
||||||
|
> Skip this step if you just built images that still exist locally on your build host.
|
||||||
|
|
||||||
|
FIXME Rewrite either [Build](#build) or this paragraph for which images are built and which ones pulled, `--profile 'full'` may not make sense.
|
||||||
|
|
||||||
|
```
|
||||||
|
docker compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --env-file "${COMPOSE_ENV}" --profile 'full' pull
|
||||||
|
```
|
||||||
|
|
||||||
|
## Copy to target
|
||||||
|
|
||||||
|
Copy images to target Docker host, that is assuming you deploy to a machine that itself has no network route to reach Docker Hub or your private registry of choice. Copying in its simplest form involves a local `docker save` and a remote `docker load`. Consider the helper mini-project [quico.space/Quico/copy-docker](https://quico.space/Quico/copy-docker) where [copy-docker.sh](https://quico.space/Quico/copy-docker/src/branch/main/copy-docker.sh) allows the following workflow:
|
||||||
|
|
||||||
|
```
|
||||||
|
source "${COMPOSE_ENV}"
|
||||||
|
# FIXME Docker Hub image name with or without slash? FIXME
|
||||||
|
copy-docker 'vault:'"${VAULT_VERSION}" fully.qualified.domain.name
|
||||||
|
```
|
||||||
|
|
||||||
|
## Start
|
||||||
|
|
||||||
|
```
|
||||||
|
docker --context 'fully.qualified.domain.name' compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --env-file "${COMPOSE_ENV}" up --detach
|
||||||
|
```
|
||||||
|
|
||||||
|
# Initial setup
|
||||||
|
|
||||||
|
We're assuming you run Docker Compose workloads with ZFS-based bind mounts. ZFS management, creating a zpool and setting adequate properties for its datasets is out of scope of this document.
|
||||||
|
|
||||||
|
## Datasets
|
||||||
|
|
||||||
|
Create ZFS datasets and set permissions as needed.
|
||||||
|
|
||||||
|
* Parent dataset
|
||||||
|
```
|
||||||
|
zfs create -o mountpoint=/opt/docker-data 'zpool/docker-data'
|
||||||
|
```
|
||||||
|
|
||||||
|
* Container-specific datasets
|
||||||
|
```
|
||||||
|
zfs create -p 'zpool/docker-data/vault-'"${COMPOSE_CTX}"'/vault/data/db'
|
||||||
|
zfs create -p 'zpool/docker-data/vault-'"${COMPOSE_CTX}"'/vault/data/logs'
|
||||||
|
zfs create -p 'zpool/docker-data/vault-'"${COMPOSE_CTX}"'/vault/config'
|
||||||
|
```
|
||||||
|
FIXME When changing bind mount locations to real ones remember to also update `volumes:` in [compose.yaml](compose.yaml) FIXME
|
||||||
|
|
||||||
|
* Create subdirs
|
||||||
|
```
|
||||||
|
mkdir -p '/opt/docker-data/vault-'"${COMPOSE_CTX}"'/vault/'{'.ssh','config','data','projects'}
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change ownership
|
||||||
|
```
|
||||||
|
chown -R 1000:1000 '/opt/docker-data/vault-'"${COMPOSE_CTX}"'/vault/data/'*
|
||||||
|
```
|
||||||
|
|
||||||
|
## Additional files
|
||||||
|
|
||||||
|
Place the following files on target server. Use the directory structure at [build-context](build-context) as a guide, specifically at `docker-data`.
|
||||||
|
|
||||||
|
FIXME Add details about files that aren't self-explanatory FIXME
|
||||||
|
|
||||||
|
```
|
||||||
|
build-context/
|
||||||
|
├── docker-data
|
||||||
|
│ └── config
|
||||||
|
│ └── vault.cfg
|
||||||
|
├── ...
|
||||||
|
└── ...
|
||||||
|
```
|
||||||
|
|
||||||
|
When done head back up to [How to run](#how-to-run).
|
||||||
|
|
||||||
|
# Development
|
||||||
|
|
||||||
|
## Conventional commits
|
||||||
|
|
||||||
|
This project uses [Conventional Commits](https://www.conventionalcommits.org/) for its commit messages.
|
||||||
|
|
||||||
|
### Commit types
|
||||||
|
|
||||||
|
Commit _types_ besides `fix` and `feat` are:
|
||||||
|
|
||||||
|
- `refactor`: Keeping functionality while streamlining or otherwise improving function flow
|
||||||
|
- `docs`: Documentation for project or components
|
||||||
|
|
||||||
|
### Commit scopes
|
||||||
|
|
||||||
|
The following _scopes_ are known for this project. A Conventional Commits commit message may optionally use one of the following scopes or none:
|
||||||
|
|
||||||
|
- `vault`: A change to how the `vault` service component works
|
||||||
|
- `build`: Build-related changes such as `Dockerfile` fixes and features.
|
||||||
|
- `mount`: Volume or bind mount-related changes.
|
||||||
|
- `net`: Networking, IP addressing, routing changes
|
||||||
|
- `meta`: Affects the project's repo layout, file names etc.
|
@@ -1,6 +1,6 @@
|
|||||||
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
||||||
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
||||||
# file at 'env/fully.qualified.domain.name.example' for reference.
|
# file at 'env/fqdn_context.env.example' for reference.
|
||||||
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
||||||
|
|
||||||
# Another env var, this one's needed in the example build step below:
|
# Another env var, this one's needed in the example build step below:
|
||||||
|
@@ -1,9 +1,10 @@
|
|||||||
services:
|
services:
|
||||||
vault-build:
|
vault-build:
|
||||||
image: "vault:${VAULT_VERSION}"
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "registry.example.com/project/vault:${VAULT_BUILD_DATE}-${VAULT_VERSION}"
|
||||||
profiles: ["build"]
|
profiles: ["build"]
|
||||||
build:
|
build:
|
||||||
context: "build-context/vault"
|
context: "build-context"
|
||||||
dockerfile: Dockerfile
|
dockerfile: Dockerfile
|
||||||
args:
|
args:
|
||||||
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
36
docker-compose/examples/hashicorpvault/compose.yaml
Normal file
36
docker-compose/examples/hashicorpvault/compose.yaml
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
services:
|
||||||
|
vault:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "registry.example.com/project/vault:${VAULT_BUILD_DATE}-${VAULT_VERSION}"
|
||||||
|
container_name: "vault-${CONTEXT}"
|
||||||
|
networks:
|
||||||
|
vault-default:
|
||||||
|
ulimits:
|
||||||
|
nproc: ${ULIMIT_NPROC:-65535}
|
||||||
|
nofile:
|
||||||
|
soft: ${ULIMIT_NPROC:-65535}
|
||||||
|
hard: ${ULIMIT_NPROC:-65535}
|
||||||
|
extends:
|
||||||
|
file: common-settings.yaml
|
||||||
|
service: common-settings
|
||||||
|
ports:
|
||||||
|
# - "8080:80"
|
||||||
|
volumes:
|
||||||
|
# When changing bind mount locations to real ones remember to
|
||||||
|
# also update "Initial setup" section in README.md.
|
||||||
|
# - /opt/docker-data/vault-${CONTEXT}/vault/data/db:/usr/lib/vault
|
||||||
|
# - /opt/docker-data/vault-${CONTEXT}/vault/data/logs:/var/log/vault
|
||||||
|
# - /opt/docker-data/vault-${CONTEXT}/vault/config:/etc/vault
|
||||||
|
environment:
|
||||||
|
# VAULT_USER: ${VAULT_USER}
|
||||||
|
# VAULT_PASSWORD: ${VAULT_PASSWORD}
|
||||||
|
networks:
|
||||||
|
vault-default:
|
||||||
|
name: vault-${CONTEXT}
|
||||||
|
driver: bridge
|
||||||
|
driver_opts:
|
||||||
|
com.docker.network.enable_ipv6: "false"
|
||||||
|
ipam:
|
||||||
|
driver: default
|
||||||
|
config:
|
||||||
|
- subnet: ${SUBNET}
|
@@ -1,28 +0,0 @@
|
|||||||
services:
|
|
||||||
vault:
|
|
||||||
image: "vault:${VAULT_VERSION}"
|
|
||||||
container_name: "vault-${CONTEXT}"
|
|
||||||
networks:
|
|
||||||
vault-default:
|
|
||||||
extends:
|
|
||||||
file: common-settings.yml
|
|
||||||
service: common-settings
|
|
||||||
ports:
|
|
||||||
# - "8080:80"
|
|
||||||
volumes:
|
|
||||||
# - /opt/docker-data/vault-${CONTEXT}/data/db:/usr/lib/vault
|
|
||||||
# - /opt/docker-data/vault-${CONTEXT}/data/logs:/var/log/vault
|
|
||||||
# - /opt/docker-data/vault-${CONTEXT}/config:/etc/vault
|
|
||||||
environment:
|
|
||||||
# VAULT_USER: ${VAULT_USER}
|
|
||||||
# VAULT_PASSWORD: ${VAULT_PASSWORD}
|
|
||||||
networks:
|
|
||||||
vault-default:
|
|
||||||
name: vault-${CONTEXT}
|
|
||||||
driver: bridge
|
|
||||||
driver_opts:
|
|
||||||
com.docker.network.enable_ipv6: "false"
|
|
||||||
ipam:
|
|
||||||
driver: default
|
|
||||||
config:
|
|
||||||
# - subnet: 172.21.184.0/24
|
|
33
docker-compose/examples/hashicorpvault/env/fqdn_context.env.example
vendored
Normal file
33
docker-compose/examples/hashicorpvault/env/fqdn_context.env.example
vendored
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
CONTEXT=ux_vilnius
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Set something sensible here and uncomment
|
||||||
|
# ---
|
||||||
|
# VAULT_VERSION=x.y.z
|
||||||
|
# VAULT_VIP=10.1.1.2
|
||||||
|
# VAULT_BUILD_DATE=20230731
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Feel free to leave defaults. They apply while these vars are commented out
|
||||||
|
# ---
|
||||||
|
# RESTARTPOLICY=unless-stopped
|
||||||
|
# TIMEZONE=Etc/UTC
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Subnet to use for this Docker Compose project. Docker defaults to
|
||||||
|
# container networks in prefix 172.16.0.0/12 which is 1 million addresses in
|
||||||
|
# the range from 172.16.0.0 to 172.31.255.255. Docker uses 172.17.0.0/16 for
|
||||||
|
# itself. Use any sensible prefix in 172.16.0.0/12 here except for Docker's
|
||||||
|
# own 172.17.0.0/16.
|
||||||
|
# ---
|
||||||
|
SUBNET=172.30.95.0/24
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# See 'compose.override.yaml' for how to make a variable available in
|
||||||
|
# a Dockerfile
|
||||||
|
# ---
|
||||||
|
# EXAMPLE_ARG_FROM_ENV_FILE=must_be_available_in_dockerfile
|
@@ -1,29 +0,0 @@
|
|||||||
CONTEXT=fsf
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Set something sensible here and uncomment
|
|
||||||
# ---
|
|
||||||
# VAULT_VERSION=x.y.z
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# A ${LOCATION} var is usually not needed. It may be helpful when a ${CONTEXT}
|
|
||||||
# extends over more than one location e.g. to bind-mount location-specific
|
|
||||||
# config files or certificates into a container.
|
|
||||||
# ---
|
|
||||||
# LOCATION=
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Feel free to leave defaults. They apply while these vars are commented out
|
|
||||||
# ---
|
|
||||||
# RESTARTPOLICY=unless-stopped
|
|
||||||
# TIMEZONE=Etc/UTC
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# See 'docker-compose.override.yml' for how to make a variable available in
|
|
||||||
# a Dockerfile
|
|
||||||
# ---
|
|
||||||
# EXAMPLE_ARG_FROM_ENV_FILE=must_be_available_in_dockerfile
|
|
@@ -2,8 +2,7 @@ import sys
|
|||||||
|
|
||||||
service_slug = "{{ cookiecutter.__service_slug }}"
|
service_slug = "{{ cookiecutter.__service_slug }}"
|
||||||
component_list_slug = "{{ cookiecutter.__component_list_slug }}"
|
component_list_slug = "{{ cookiecutter.__component_list_slug }}"
|
||||||
context_slug = "{{ cookiecutter.__context_slug }}"
|
for v in (service_slug, component_list_slug):
|
||||||
for v in (service_slug, component_list_slug, context_slug):
|
|
||||||
if not v:
|
if not v:
|
||||||
print(f"Please answer all prompts with a non-empty string. Aborting and existing 3 ...")
|
print(f"Please answer all prompts with a non-empty string. Aborting and exiting 3 ...")
|
||||||
sys.exit(3)
|
sys.exit(3)
|
||||||
|
228
docker-compose/{{ cookiecutter.__project_slug }}/README.md
Normal file
228
docker-compose/{{ cookiecutter.__project_slug }}/README.md
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
# FIXME
|
||||||
|
|
||||||
|
Search and replace all mentions of FIXME with sensible content in this file and in [compose.yaml](compose.yaml).
|
||||||
|
|
||||||
|
# {{ cookiecutter.__service_slug.capitalize() }} Docker Compose files
|
||||||
|
|
||||||
|
Docker Compose files to spin up an instance of {{ cookiecutter.__service_slug.capitalize() }} FIXME capitalization FIXME.
|
||||||
|
|
||||||
|
# How to run
|
||||||
|
|
||||||
|
Add a `COMPOSE_ENV` file and save its location as a shell variable along with the location where this repo lives, here for example `/opt/containers/{{ cookiecutter.__project_slug }}` plus all other variables. At [env/fqdn_context.env.example](env/fqdn_context.env.example) you'll find an example environment file.
|
||||||
|
|
||||||
|
When everything's ready start {{ cookiecutter.__service_slug.capitalize() }} with Docker Compose, otherwise head down to [Initial setup](#initial-setup) first.
|
||||||
|
|
||||||
|
## Environment
|
||||||
|
|
||||||
|
```
|
||||||
|
export COMPOSE_DIR='/opt/containers/{{ cookiecutter.__project_slug }}'
|
||||||
|
export COMPOSE_CTX='ux_vilnius'
|
||||||
|
export COMPOSE_PROJECT='{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"
|
||||||
|
export COMPOSE_FILE="${COMPOSE_DIR}"'/compose.yaml'{% if cookiecutter.build == "yes" %}
|
||||||
|
export COMPOSE_OVERRIDE="${COMPOSE_DIR%/}"'/compose.override.yaml'{% endif %}
|
||||||
|
export COMPOSE_ENV=<add accordingly>
|
||||||
|
```
|
||||||
|
|
||||||
|
## Context
|
||||||
|
|
||||||
|
On your deployment machine create the necessary Docker context to connect to and control the Docker daemon on whatever target host you'll be using, for example:
|
||||||
|
```
|
||||||
|
docker context create fully.qualified.domain.name --docker 'host=ssh://root@fully.qualified.domain.name'
|
||||||
|
```
|
||||||
|
|
||||||
|
{%- if cookiecutter.build == "yes" %}
|
||||||
|
|
||||||
|
## Build
|
||||||
|
{% set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{%- for component in components %}
|
||||||
|
{%- if loop.first %}
|
||||||
|
> Skip to [Pull](#pull) if you already have images in your private registry ready to use. Otherwise read on to build them now.
|
||||||
|
|
||||||
|
FIXME We build the `{{ cookiecutter.__service_slug }}` image locally. Our adjustment to the official image is simply adding `/tmp/{{ cookiecutter.__service_slug }}` to it. See {% if ',' in cookiecutter.__component_list_slug %}[build-context/{{ cookiecutter.__service_slug }}/Dockerfile](build-context/{{ cookiecutter.__service_slug }}/Dockerfile){%- else %}[build-context/Dockerfile](build-context/Dockerfile){%- endif %}. We use `/tmp/{{ cookiecutter.__service_slug }}` to bind-mount a dedicated ZFS dataset for the application's `tmpdir` location.
|
||||||
|
|
||||||
|
```
|
||||||
|
docker compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --file "${COMPOSE_OVERRIDE}" --env-file "${COMPOSE_ENV}" --profile 'build{% if ',' in cookiecutter.__component_list_slug %}-{{ cookiecutter.__service_slug }}{%- endif %}' build
|
||||||
|
```
|
||||||
|
{%- endif %}
|
||||||
|
{% endfor %}
|
||||||
|
## Push
|
||||||
|
|
||||||
|
Push to Docker Hub or your private registry. Setting up a private registry is out of scope of this repo. Once you have a registry available you can use it like so:
|
||||||
|
- On your OS install a Docker credential helper per [github.com/docker/docker-credential-helpers](https://github.com/docker/docker-credential-helpers). This will make sure you won't store credentials hashed (and unencrypted) in `~/.docker/config.json`. On an example Arch Linux machine where D-Bus Secret Service exists this will come via something like the [docker-credential-secretservice-bin](https://aur.archlinux.org/packages/docker-credential-secretservice-bin) Arch User Repository package. Just install and you're done.
|
||||||
|
- Do a `docker login registry.example.com`, enter username and password, confirm login.
|
||||||
|
|
||||||
|
```
|
||||||
|
source "${COMPOSE_ENV}"
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug %}
|
||||||
|
for image in{% for component in components %} \
|
||||||
|
'{%- if cookiecutter.build == "yes" -%}{%- if loop.first -%}registry.example.com/project/{%- endif -%}{%- endif -%}{{ component }}:'"{%- if cookiecutter.build == "yes" -%}{%- if loop.first -%}${% raw %}{{% endraw %}{{ component.upper() }}_BUILD_DATE{% raw %}}{% endraw %}-{%- endif -%}{%- endif -%}${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"{%- endfor %}; do
|
||||||
|
docker push 'registry.example.com/project/'"${image}"
|
||||||
|
done
|
||||||
|
{%- else %}
|
||||||
|
docker push "{%- if cookiecutter.build == "yes" -%}registry.example.com/project/{%- endif -%}{{ cookiecutter.__component_list_slug }}:{%- if cookiecutter.build == "yes" -%}${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_BUILD_DATE{% raw %}}{% endraw %}-{%- endif -%}${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}"
|
||||||
|
{%- endif %}
|
||||||
|
```
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
## Pull
|
||||||
|
|
||||||
|
{% if cookiecutter.build == "yes" %}> Skip this step if you just built images that still exist locally on your build host.
|
||||||
|
|
||||||
|
FIXME Rewrite either [Build](#build) or this paragraph for which images are built and which ones pulled, `--profile 'full'` may not make sense.{% else %}Pull images from Docker Hub verbatim.{% endif %}
|
||||||
|
|
||||||
|
```
|
||||||
|
docker compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --env-file "${COMPOSE_ENV}" --profile 'full' pull
|
||||||
|
```
|
||||||
|
|
||||||
|
## Copy to target
|
||||||
|
|
||||||
|
Copy images to target Docker host, that is assuming you deploy to a machine that itself has no network route to reach Docker Hub or your private registry of choice. Copying in its simplest form involves a local `docker save` and a remote `docker load`. Consider the helper mini-project [quico.space/Quico/copy-docker](https://quico.space/Quico/copy-docker) where [copy-docker.sh](https://quico.space/Quico/copy-docker/src/branch/main/copy-docker.sh) allows the following workflow:
|
||||||
|
|
||||||
|
```
|
||||||
|
source "${COMPOSE_ENV}"
|
||||||
|
# FIXME Docker Hub image name with or without slash? FIXME
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug %}
|
||||||
|
for image in{% for component in components %} '{{ component }}:'"${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"{%- endfor %}; do
|
||||||
|
copy-docker "${image}" fully.qualified.domain.name
|
||||||
|
done
|
||||||
|
{%- else %}
|
||||||
|
copy-docker '{{ cookiecutter.__component_list_slug }}:'"${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}" fully.qualified.domain.name
|
||||||
|
{%- endif %}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Start
|
||||||
|
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug %}
|
||||||
|
|
||||||
|
FIXME Does the service use a virtual IP address? FIXME
|
||||||
|
|
||||||
|
Make sure your service's virtual IP address is bound on your target host then start containers.
|
||||||
|
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
```
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug %}
|
||||||
|
docker --context 'fully.qualified.domain.name' compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --env-file "${COMPOSE_ENV}" --profile 'full' up --detach
|
||||||
|
{%- else %}
|
||||||
|
docker --context 'fully.qualified.domain.name' compose --project-name "${COMPOSE_PROJECT}" --file "${COMPOSE_FILE}" --env-file "${COMPOSE_ENV}" up --detach
|
||||||
|
{%- endif %}
|
||||||
|
```
|
||||||
|
|
||||||
|
# Initial setup
|
||||||
|
|
||||||
|
We're assuming you run Docker Compose workloads with ZFS-based bind mounts. ZFS management, creating a zpool and setting adequate properties for its datasets is out of scope of this document.
|
||||||
|
|
||||||
|
## Datasets
|
||||||
|
|
||||||
|
Create ZFS datasets and set permissions as needed.
|
||||||
|
|
||||||
|
* Parent dateset
|
||||||
|
```
|
||||||
|
zfs create -o mountpoint=/opt/docker-data 'zpool/docker-data'
|
||||||
|
```
|
||||||
|
|
||||||
|
* Container-specific datasets
|
||||||
|
```
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug -%}
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{%- for component in components %}
|
||||||
|
zfs create -p 'zpool/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ component }}/data/db'
|
||||||
|
zfs create -p 'zpool/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ component }}/data/logs'
|
||||||
|
zfs create -p 'zpool/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ component }}/config'
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- else %}
|
||||||
|
zfs create -p 'zpool/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ cookiecutter.__service_slug }}/data/db'
|
||||||
|
zfs create -p 'zpool/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ cookiecutter.__service_slug }}/data/logs'
|
||||||
|
zfs create -p 'zpool/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ cookiecutter.__service_slug }}/config'
|
||||||
|
{%- endif %}
|
||||||
|
```
|
||||||
|
FIXME When changing bind mount locations to real ones remember to also update `volumes:` in [compose.yaml](compose.yaml) FIXME
|
||||||
|
|
||||||
|
* Create subdirs
|
||||||
|
```
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{% for component in components %}
|
||||||
|
{%- if loop.first %}
|
||||||
|
mkdir -p '/opt/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ cookiecutter.__service_slug }}/'{'.ssh','config','data','projects'}
|
||||||
|
{%- endif %}
|
||||||
|
{%- endfor %}
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change ownership
|
||||||
|
```
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{% for component in components %}
|
||||||
|
{%- if loop.first %}
|
||||||
|
chown -R 1000:1000 '/opt/docker-data/{{ cookiecutter.__service_slug }}-'"${COMPOSE_CTX}"'/{{ cookiecutter.__service_slug }}/data/'*
|
||||||
|
{%- endif %}
|
||||||
|
{%- endfor %}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Additional files
|
||||||
|
|
||||||
|
Place the following files on target server. Use the directory structure at [build-context](build-context) as a guide, specifically at `docker-data`.
|
||||||
|
|
||||||
|
FIXME Add details about files that aren't self-explanatory FIXME
|
||||||
|
|
||||||
|
```
|
||||||
|
build-context/
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug -%}
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{%- for component in components %}
|
||||||
|
{%- if not loop.last %}
|
||||||
|
├── {{ component }}
|
||||||
|
│ ├── docker-data
|
||||||
|
│ | └── config
|
||||||
|
│ │ └── {{ component }}.cfg
|
||||||
|
│ ├── ...
|
||||||
|
│ └── ...
|
||||||
|
{%- else %}
|
||||||
|
└── {{ component }}
|
||||||
|
├── docker-data
|
||||||
|
| └── config
|
||||||
|
│ └── {{ component }}.cfg
|
||||||
|
├── ...
|
||||||
|
└── ...
|
||||||
|
{%- endif %}
|
||||||
|
{%- endfor %}
|
||||||
|
{%- else %}
|
||||||
|
├── docker-data
|
||||||
|
│ └── config
|
||||||
|
│ └── {{ cookiecutter.__service_slug }}.cfg
|
||||||
|
├── ...
|
||||||
|
└── ...
|
||||||
|
{%- endif %}
|
||||||
|
```
|
||||||
|
|
||||||
|
When done head back up to [How to run](#how-to-run).
|
||||||
|
|
||||||
|
# Development
|
||||||
|
|
||||||
|
## Conventional commits
|
||||||
|
|
||||||
|
This project uses [Conventional Commits](https://www.conventionalcommits.org/) for its commit messages.
|
||||||
|
|
||||||
|
### Commit types
|
||||||
|
|
||||||
|
Commit _types_ besides `fix` and `feat` are:
|
||||||
|
|
||||||
|
- `refactor`: Keeping functionality while streamlining or otherwise improving function flow
|
||||||
|
- `docs`: Documentation for project or components
|
||||||
|
|
||||||
|
### Commit scopes
|
||||||
|
|
||||||
|
The following _scopes_ are known for this project. A Conventional Commits commit message may optionally use one of the following scopes or none:
|
||||||
|
{%if ',' in cookiecutter.__component_list_slug -%}
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{%- for component in components %}
|
||||||
|
- `{{ component }}`: A change to how the `{{ component }}` service component works
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- else %}
|
||||||
|
- `{{ cookiecutter.__service_slug }}`: A change to how the `{{ cookiecutter.__service_slug }}` service component works
|
||||||
|
{%- endif %}
|
||||||
|
- `build`: Build-related changes such as `Dockerfile` fixes and features.
|
||||||
|
- `mount`: Volume or bind mount-related changes.
|
||||||
|
- `net`: Networking, IP addressing, routing changes
|
||||||
|
- `meta`: Affects the project's repo layout, file names etc.
|
@@ -1,6 +1,6 @@
|
|||||||
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
# For the remainder of this Dockerfile EXAMPLE_ARG_FOR_DOCKERFILE will be
|
||||||
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
# available with a value of 'must_be_available_in_dockerfile', check out the env
|
||||||
# file at 'env/fully.qualified.domain.name.example' for reference.
|
# file at 'env/fqdn_context.env.example' for reference.
|
||||||
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
# ARG EXAMPLE_ARG_FOR_DOCKERFILE
|
||||||
|
|
||||||
# Another env var, this one's needed in the example build step below:
|
# Another env var, this one's needed in the example build step below:
|
||||||
|
@@ -0,0 +1,27 @@
|
|||||||
|
services:
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug -%}
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{% for component in components %}
|
||||||
|
{{ component }}-build:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "{%- if cookiecutter.build == "yes" -%}{%- if loop.first -%}registry.example.com/project/{%- endif -%}{%- endif -%}{{ component }}:{%- if cookiecutter.build == "yes" -%}{%- if loop.first -%}${% raw %}{{% endraw %}{{ component.upper() }}_BUILD_DATE{% raw %}}{% endraw %}-{%- endif -%}{%- endif -%}${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"
|
||||||
|
profiles: ["build", "build-{{ component }}"]
|
||||||
|
build:
|
||||||
|
context: "build-context/{{ component }}"
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
args:
|
||||||
|
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
||||||
|
{{ component.upper() }}_VERSION: "${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"
|
||||||
|
{%- endfor %}
|
||||||
|
{%- else %}
|
||||||
|
{{ cookiecutter.__component_list_slug }}-build:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "{%- if cookiecutter.build == "yes" -%}registry.example.com/project/{%- endif -%}{{ cookiecutter.__component_list_slug }}:{%- if cookiecutter.build == "yes" -%}${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_BUILD_DATE{% raw %}}{% endraw %}-{%- endif -%}${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}"
|
||||||
|
profiles: ["build"]
|
||||||
|
build:
|
||||||
|
context: "build-context"
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
args:
|
||||||
|
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
||||||
|
{{ cookiecutter.__component_list_slug.upper() }}_VERSION: "${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}"
|
||||||
|
{%- endif %}
|
@@ -0,0 +1,89 @@
|
|||||||
|
services:
|
||||||
|
{%- if ',' in cookiecutter.__component_list_slug -%}
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{%- set ns = namespace(found=false) -%}
|
||||||
|
{%- for component in components %}
|
||||||
|
{%- if loop.first -%}
|
||||||
|
{%- set ns.first_component = component -%}
|
||||||
|
{%- elif N is undefined -%}
|
||||||
|
{%- set ns.second_component = component -%}
|
||||||
|
{%- set N = 0 -%}
|
||||||
|
{%- endif -%}
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- for component in components %}
|
||||||
|
{{ component }}:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "{%- if cookiecutter.build == "yes" -%}{%- if loop.first -%}registry.example.com/project/{%- endif -%}{%- endif -%}{{ component }}:{%- if cookiecutter.build == "yes" -%}{%- if loop.first -%}${% raw %}{{% endraw %}{{ component.upper() }}_BUILD_DATE{% raw %}}{% endraw %}-{%- endif -%}{%- endif -%}${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"
|
||||||
|
container_name: "{{ cookiecutter.__service_slug }}-{{ component }}-${CONTEXT}"
|
||||||
|
networks:
|
||||||
|
{{ cookiecutter.__service_slug }}-default:
|
||||||
|
profiles: ["full", "{{ component }}"]
|
||||||
|
{% if loop.first -%}
|
||||||
|
depends_on:
|
||||||
|
{{ ns.second_component }}:
|
||||||
|
condition: service_healthy
|
||||||
|
{%- else -%}
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "fping", "--count=1", "${% raw %}{{% endraw %}{{ ns.first_component.upper() }}_VIP{% raw %}}{% endraw %}", "--period=500", "--quiet"]
|
||||||
|
interval: 3s
|
||||||
|
timeout: 1s
|
||||||
|
retries: 60
|
||||||
|
start_period: 2s
|
||||||
|
{%- endif %}
|
||||||
|
ulimits:
|
||||||
|
nproc: ${ULIMIT_NPROC:-65535}
|
||||||
|
nofile:
|
||||||
|
soft: ${ULIMIT_NPROC:-65535}
|
||||||
|
hard: ${ULIMIT_NPROC:-65535}
|
||||||
|
extends:
|
||||||
|
file: common-settings.yaml
|
||||||
|
service: common-settings
|
||||||
|
ports:
|
||||||
|
# - "8080:80"
|
||||||
|
volumes:
|
||||||
|
# When changing bind mount locations to real ones remember to
|
||||||
|
# also update "Initial setup" section in README.md.
|
||||||
|
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/{{ component }}/data/db:/usr/lib/{{ component }}
|
||||||
|
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/{{ component }}/data/logs:/var/log/{{ component }}
|
||||||
|
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/{{ component }}/config:/etc/{{ component }}
|
||||||
|
environment:
|
||||||
|
# {{ component.upper() }}_USER: ${% raw %}{{% endraw %}{{ component.upper() }}_USER{% raw %}}{% endraw %}
|
||||||
|
# {{ component.upper() }}_PASSWORD: ${% raw %}{{% endraw %}{{ component.upper() }}_PASSWORD{% raw %}}{% endraw %}
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- else %}
|
||||||
|
{{ cookiecutter.__component_list_slug }}:
|
||||||
|
# FIXME image name with or without slash? Docker Hub or private registry? With or without *_BUILD_DATE? FIXME
|
||||||
|
image: "{%- if cookiecutter.build == "yes" -%}registry.example.com/project/{%- endif -%}{{ cookiecutter.__component_list_slug }}:{%- if cookiecutter.build == "yes" -%}${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_BUILD_DATE{% raw %}}{% endraw %}-{%- endif -%}${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}"
|
||||||
|
container_name: "{{ cookiecutter.__service_slug }}-${CONTEXT}"
|
||||||
|
networks:
|
||||||
|
{{ cookiecutter.__service_slug }}-default:
|
||||||
|
ulimits:
|
||||||
|
nproc: ${ULIMIT_NPROC:-65535}
|
||||||
|
nofile:
|
||||||
|
soft: ${ULIMIT_NPROC:-65535}
|
||||||
|
hard: ${ULIMIT_NPROC:-65535}
|
||||||
|
extends:
|
||||||
|
file: common-settings.yaml
|
||||||
|
service: common-settings
|
||||||
|
ports:
|
||||||
|
# - "8080:80"
|
||||||
|
volumes:
|
||||||
|
# When changing bind mount locations to real ones remember to
|
||||||
|
# also update "Initial setup" section in README.md.
|
||||||
|
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/{{ cookiecutter.__service_slug }}/data/db:/usr/lib/{{ cookiecutter.__service_slug }}
|
||||||
|
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/{{ cookiecutter.__service_slug }}/data/logs:/var/log/{{ cookiecutter.__service_slug }}
|
||||||
|
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/{{ cookiecutter.__service_slug }}/config:/etc/{{ cookiecutter.__service_slug }}
|
||||||
|
environment:
|
||||||
|
# {{ cookiecutter.__component_list_slug.upper() }}_USER: ${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_USER{% raw %}}{% endraw %}
|
||||||
|
# {{ cookiecutter.__component_list_slug.upper() }}_PASSWORD: ${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_PASSWORD{% raw %}}{% endraw %}
|
||||||
|
{%- endif %}
|
||||||
|
networks:
|
||||||
|
{{ cookiecutter.__service_slug }}-default:
|
||||||
|
name: {{ cookiecutter.__service_slug }}-${CONTEXT}
|
||||||
|
driver: bridge
|
||||||
|
driver_opts:
|
||||||
|
com.docker.network.enable_ipv6: "false"
|
||||||
|
ipam:
|
||||||
|
driver: default
|
||||||
|
config:
|
||||||
|
- subnet: ${SUBNET}
|
@@ -1,25 +0,0 @@
|
|||||||
services:
|
|
||||||
{%- if ',' in cookiecutter.__component_list_slug -%}
|
|
||||||
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
|
||||||
{% for component in components %}
|
|
||||||
{{ component }}-build:
|
|
||||||
image: "{{ component }}:${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"
|
|
||||||
profiles: ["build", "build-{{ component }}"]
|
|
||||||
build:
|
|
||||||
context: "build-context/{{ component }}"
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
args:
|
|
||||||
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
|
||||||
{{ component.upper() }}_VERSION: "${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"
|
|
||||||
{%- endfor %}
|
|
||||||
{%- else %}
|
|
||||||
{{ cookiecutter.__component_list_slug }}-build:
|
|
||||||
image: "{{ cookiecutter.__component_list_slug }}:${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}"
|
|
||||||
profiles: ["build"]
|
|
||||||
build:
|
|
||||||
context: "build-context/{{ cookiecutter.__component_list_slug }}"
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
args:
|
|
||||||
EXAMPLE_ARG_FOR_DOCKERFILE: "${EXAMPLE_ARG_FROM_ENV_FILE}"
|
|
||||||
{{ cookiecutter.__component_list_slug.upper() }}_VERSION: "${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}"
|
|
||||||
{%- endif %}
|
|
@@ -1,52 +0,0 @@
|
|||||||
services:
|
|
||||||
{%- if ',' in cookiecutter.__component_list_slug -%}
|
|
||||||
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
|
||||||
{%- for component in components %}
|
|
||||||
{{ component }}:
|
|
||||||
image: "{{ component }}:${% raw %}{{% endraw %}{{ component.upper() }}_VERSION{% raw %}}{% endraw %}"
|
|
||||||
container_name: "{{ cookiecutter.__service_slug }}-{{ component }}-${CONTEXT}"
|
|
||||||
networks:
|
|
||||||
{{ cookiecutter.__service_slug }}-default:
|
|
||||||
profiles: ["full", "{{ component }}"]
|
|
||||||
extends:
|
|
||||||
file: common-settings.yml
|
|
||||||
service: common-settings
|
|
||||||
ports:
|
|
||||||
# - "8080:80"
|
|
||||||
volumes:
|
|
||||||
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-{{ component }}-${CONTEXT}/{{ component }}/data/db:/usr/lib/{{ component }}
|
|
||||||
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-{{ component }}-${CONTEXT}/{{ component }}/data/logs:/var/log/{{ component }}
|
|
||||||
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-{{ component }}-${CONTEXT}/{{ component }}/config:/etc/{{ component }}
|
|
||||||
environment:
|
|
||||||
# {{ component.upper() }}_USER: ${% raw %}{{% endraw %}{{ component.upper() }}_USER{% raw %}}{% endraw %}
|
|
||||||
# {{ component.upper() }}_PASSWORD: ${% raw %}{{% endraw %}{{ component.upper() }}_PASSWORD{% raw %}}{% endraw %}
|
|
||||||
{%- endfor -%}
|
|
||||||
{%- else %}
|
|
||||||
{{ cookiecutter.__component_list_slug }}:
|
|
||||||
image: "{{ cookiecutter.__component_list_slug }}:${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_VERSION{% raw %}}{% endraw %}"
|
|
||||||
container_name: "{{ cookiecutter.__service_slug }}-${CONTEXT}"
|
|
||||||
networks:
|
|
||||||
{{ cookiecutter.__service_slug }}-default:
|
|
||||||
extends:
|
|
||||||
file: common-settings.yml
|
|
||||||
service: common-settings
|
|
||||||
ports:
|
|
||||||
# - "8080:80"
|
|
||||||
volumes:
|
|
||||||
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/data/db:/usr/lib/{{ cookiecutter.__service_slug }}
|
|
||||||
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/data/logs:/var/log/{{ cookiecutter.__service_slug }}
|
|
||||||
# - /opt/docker-data/{{ cookiecutter.__service_slug }}-${CONTEXT}/config:/etc/{{ cookiecutter.__service_slug }}
|
|
||||||
environment:
|
|
||||||
# {{ cookiecutter.__component_list_slug.upper() }}_USER: ${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_USER{% raw %}}{% endraw %}
|
|
||||||
# {{ cookiecutter.__component_list_slug.upper() }}_PASSWORD: ${% raw %}{{% endraw %}{{ cookiecutter.__component_list_slug.upper() }}_PASSWORD{% raw %}}{% endraw %}
|
|
||||||
{%- endif %}
|
|
||||||
networks:
|
|
||||||
{{ cookiecutter.__service_slug }}-default:
|
|
||||||
name: {{ cookiecutter.__service_slug }}-${CONTEXT}
|
|
||||||
driver: bridge
|
|
||||||
driver_opts:
|
|
||||||
com.docker.network.enable_ipv6: "false"
|
|
||||||
ipam:
|
|
||||||
driver: default
|
|
||||||
config:
|
|
||||||
# - subnet: 172.21.184.0/24
|
|
40
docker-compose/{{ cookiecutter.__project_slug }}/env/fqdn_context.env.example
vendored
Normal file
40
docker-compose/{{ cookiecutter.__project_slug }}/env/fqdn_context.env.example
vendored
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
CONTEXT=ux_vilnius
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Set something sensible here and uncomment
|
||||||
|
# ---
|
||||||
|
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
||||||
|
{% for component in components %}
|
||||||
|
# {{ component.upper() }}_VERSION=x.y.z
|
||||||
|
{%- endfor %}
|
||||||
|
{%- for component in components %}
|
||||||
|
{%- if loop.first %}
|
||||||
|
# {{ component.upper() }}_VIP=10.1.1.2
|
||||||
|
# {{ component.upper() }}_BUILD_DATE=20230731
|
||||||
|
{%- endif %}
|
||||||
|
{%- endfor %}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Feel free to leave defaults. They apply while these vars are commented out
|
||||||
|
# ---
|
||||||
|
# RESTARTPOLICY=unless-stopped
|
||||||
|
# TIMEZONE=Etc/UTC
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Subnet to use for this Docker Compose project. Docker defaults to
|
||||||
|
# container networks in prefix 172.16.0.0/12 which is 1 million addresses in
|
||||||
|
# the range from 172.16.0.0 to 172.31.255.255. Docker uses 172.17.0.0/16 for
|
||||||
|
# itself. Use any sensible prefix in 172.16.0.0/12 here except for Docker's
|
||||||
|
# own 172.17.0.0/16.
|
||||||
|
# ---
|
||||||
|
SUBNET=172.30.95.0/24
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# See 'compose.override.yaml' for how to make a variable available in
|
||||||
|
# a Dockerfile
|
||||||
|
# ---
|
||||||
|
# EXAMPLE_ARG_FROM_ENV_FILE=must_be_available_in_dockerfile
|
@@ -1,32 +0,0 @@
|
|||||||
CONTEXT={{ cookiecutter.__context_slug }}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Set something sensible here and uncomment
|
|
||||||
# ---
|
|
||||||
{%- set components = cookiecutter.__component_list_slug.split(',') -%}
|
|
||||||
{% for component in components %}
|
|
||||||
# {{ component.upper() }}_VERSION=x.y.z
|
|
||||||
{%- endfor %}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# A ${LOCATION} var is usually not needed. It may be helpful when a ${CONTEXT}
|
|
||||||
# extends over more than one location e.g. to bind-mount location-specific
|
|
||||||
# config files or certificates into a container.
|
|
||||||
# ---
|
|
||||||
# LOCATION=
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Feel free to leave defaults. They apply while these vars are commented out
|
|
||||||
# ---
|
|
||||||
# RESTARTPOLICY=unless-stopped
|
|
||||||
# TIMEZONE=Etc/UTC
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# See 'docker-compose.override.yml' for how to make a variable available in
|
|
||||||
# a Dockerfile
|
|
||||||
# ---
|
|
||||||
# EXAMPLE_ARG_FROM_ENV_FILE=must_be_available_in_dockerfile
|
|
65
python-naive/README.md
Normal file
65
python-naive/README.md
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
# Naive Python template
|
||||||
|
|
||||||
|
## Run it
|
||||||
|
|
||||||
|
Execute this template like so:
|
||||||
|
```
|
||||||
|
cookiecutter https://quico.space/Quico/py-cookiecutter-templates.git --directory 'python-naive'
|
||||||
|
```
|
||||||
|
|
||||||
|
Cookiecutter interactively prompts you for the following info, here with example answers:
|
||||||
|
```
|
||||||
|
project_slug [project-slug]: update-firewall-source
|
||||||
|
Select use_rich_logging:
|
||||||
|
1 - yes
|
||||||
|
2 - no
|
||||||
|
Choose from 1, 2 [1]:
|
||||||
|
Select use_config_ini:
|
||||||
|
1 - yes
|
||||||
|
2 - no
|
||||||
|
Choose from 1, 2 [1]:
|
||||||
|
Select use_inflect:
|
||||||
|
1 - yes
|
||||||
|
2 - no
|
||||||
|
Choose from 1, 2 [1]:
|
||||||
|
```
|
||||||
|
|
||||||
|
Done, directory structure and files for your next Python project are ready for you to hit the ground running.
|
||||||
|
|
||||||
|
## Explanation and terminology
|
||||||
|
|
||||||
|
Your answers translate as follows into rendered files.
|
||||||
|
|
||||||
|
1. The `project_slug` is used as a directory name for your Python project where spaces and underscores are replaced-with-dashes. It's also used for a few example variables where `we_use_underscores` instead.
|
||||||
|
```
|
||||||
|
.
|
||||||
|
└── update-firewall-source
|
||||||
|
├── examples
|
||||||
|
│  └── config.ini.example
|
||||||
|
├── requirements.in
|
||||||
|
├── requirements.txt
|
||||||
|
└── update-firewall-source.py
|
||||||
|
```
|
||||||
|
|
||||||
|
2. The `use_rich_logging` variable adds settings and examples that make ample use of the [Rich package](https://github.com/Textualize/rich/) for beautiful logging. You typically want this so it defaults to `yes`. Just hit `Enter` to confirm. The setting also adds necessary requirements.
|
||||||
|
|
||||||
|
3. With `use_config_ini` you're getting a boat load of functions, presets, variables and examples that integrate a config.ini file via the `configparser` module.
|
||||||
|
|
||||||
|
4. Lastly with `use_inflect` you're adding the `inflect` module which does grammatically correct text rendering such as plural and singular. It also includes a few examples.
|
||||||
|
|
||||||
|
|
||||||
|
## Result
|
||||||
|
|
||||||
|
### Enable Rich, configparser and inflect
|
||||||
|
|
||||||
|
Above example of a Python project with all of Rich, `configparser` and `inflect` enabled will give you a directory structure like this:
|
||||||
|
```
|
||||||
|
.
|
||||||
|
└── update-firewall-source
|
||||||
|
├── examples
|
||||||
|
│  └── config.ini.example
|
||||||
|
├── requirements.in
|
||||||
|
├── requirements.txt
|
||||||
|
└── update-firewall-source.py
|
||||||
|
```
|
||||||
|
You can see real-life example file content over at [examples/update-firewall-source](examples/update-firewall-source). Cookiecutter has generated all necessary dependencies with pinned versions and a `update-firewall-source.py` script file to get you started.
|
8
python-naive/cookiecutter.json
Normal file
8
python-naive/cookiecutter.json
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"project_slug": "project-slug",
|
||||||
|
"__project_slug": "{{ cookiecutter.project_slug.lower().replace(' ', '-').replace('_', '-') }}",
|
||||||
|
"__project_slug_under": "{{ cookiecutter.project_slug.lower().replace(' ', '_').replace('-', '_') }}",
|
||||||
|
"use_rich_logging": ["yes", "no"],
|
||||||
|
"use_config_ini": ["yes", "no"],
|
||||||
|
"use_inflect": ["yes", "no"]
|
||||||
|
}
|
@@ -0,0 +1,18 @@
|
|||||||
|
[DEFAULT]
|
||||||
|
self_name = update-firewall-source
|
||||||
|
tmp_base_dir = /tmp/%(self_name)s
|
||||||
|
state_base_dir = /var/lib/%(self_name)s
|
||||||
|
state_files_dir = %(state_base_dir)s/state
|
||||||
|
state_file_retention = 50
|
||||||
|
state_file_name_prefix = state-
|
||||||
|
state_file_name_suffix = .log
|
||||||
|
update_firewall_source_some_option = "http://localhost:8000/api/query"
|
||||||
|
another_option = "first"
|
||||||
|
|
||||||
|
[this-is-a-section]
|
||||||
|
min_duration = 1200
|
||||||
|
max_duration = 3000
|
||||||
|
title_not_regex = this|that|somethingelse
|
||||||
|
query = @http-payload.json
|
||||||
|
dl_dir = /tmp/some/dir
|
||||||
|
another_option = "overwriting_from_default"
|
@@ -0,0 +1,2 @@
|
|||||||
|
rich
|
||||||
|
inflect
|
@@ -0,0 +1,14 @@
|
|||||||
|
#
|
||||||
|
# This file is autogenerated by pip-compile with python 3.10
|
||||||
|
# To update, run:
|
||||||
|
#
|
||||||
|
# pip-compile
|
||||||
|
#
|
||||||
|
commonmark==0.9.1
|
||||||
|
# via rich
|
||||||
|
inflect==5.6.0
|
||||||
|
# via -r requirements.in
|
||||||
|
pygments==2.12.0
|
||||||
|
# via rich
|
||||||
|
rich==12.4.4
|
||||||
|
# via -r requirements.in
|
@@ -0,0 +1,237 @@
|
|||||||
|
# Path and env manipulation
|
||||||
|
import os
|
||||||
|
# Use a config file
|
||||||
|
import configparser
|
||||||
|
# Exit with various exit codes
|
||||||
|
import sys
|
||||||
|
# Manipulate style and content of logs
|
||||||
|
import logging
|
||||||
|
from rich.logging import RichHandler
|
||||||
|
# Correctly generate plurals, singular nouns etc.
|
||||||
|
import inflect
|
||||||
|
|
||||||
|
|
||||||
|
# Exit codes
|
||||||
|
# 1: Config file invalid, it has no sections
|
||||||
|
# 2: Config file invalid, sections must define at least CONST.CFG_MANDATORY
|
||||||
|
# 7 : An option that must have a non-null value is either unset or null
|
||||||
|
|
||||||
|
|
||||||
|
class CONST(object):
    """Read-only namespace holding all script-wide constants (never instantiated)."""
    __slots__ = ()

    # RichHandler renders time/level/path itself, so the log format only
    # needs to carry the message text.
    LOG_FORMAT = "%(message)s"

    # How to find a config file: a "config.ini" located next to this script.
    CFG_THIS_FILE_DIRNAME = os.path.dirname(__file__)
    CFG_DEFAULT_FILENAME = "config.ini"
    CFG_DEFAULT_ABS_PATH = os.path.join(CFG_THIS_FILE_DIRNAME, CFG_DEFAULT_FILENAME)

    # Values you don't have to set, these are their internal defaults. You may optionally add a key 'is_global' equal
    # to either True or False. By default if left off it'll be assumed False. Script will treat values where
    # 'is_global' equals True as not being overridable in a '[section]'. It's a setting that only makes sense in a
    # global context for the entire script. An option where 'empty_ok' equals True can safely be unset or set to
    # an empty string. An example config.ini file may give a sane config example value here, removing that value
    # still results in a valid file.
    #
    # NOTE(fix): every entry now spells out 'is_global' explicitly. The module-level
    # consumers below index default["is_global"] directly, so an entry omitting the
    # key used to raise KeyError at import time.
    CFG_KNOWN_DEFAULTS = [
        {"key": "self_name", "value": "update-firewall-source", "is_global": False, "empty_ok": False},
        {"key": "tmp_base_dir", "value": os.path.join(CFG_THIS_FILE_DIRNAME, "data/tmp/%(self_name)s"),
         "is_global": False, "empty_ok": False},
        {"key": "state_base_dir", "value": os.path.join(CFG_THIS_FILE_DIRNAME, "data/var/lib/%(self_name)s"),
         "is_global": False, "empty_ok": False},
        {"key": "state_files_dir", "value": "%(state_base_dir)s/state", "is_global": False, "empty_ok": False},
        {"key": "state_file_retention", "value": "50", "is_global": False, "empty_ok": True},
        {"key": "state_file_name_prefix", "value": "state-", "is_global": False, "empty_ok": True},
        {"key": "state_file_name_suffix", "value": ".log", "is_global": False, "empty_ok": True},
        {"key": "update_firewall_source_some_option", "value": "http://localhost:8000/api/query", "is_global": True,
         "empty_ok": False},
        {"key": "another_option", "value": "first", "is_global": True, "empty_ok": True}
    ]
    # In all sections other than 'default' the following settings are known and accepted. We ignore other settings.
    # Per CFG_KNOWN_DEFAULTS above most '[DEFAULT]' options are accepted by virtue of being defaults and overridable.
    # The only exception are options where "is_global" equals True, they can't be overridden in '[sections]'; any
    # attempt at doing it anyway will be ignored. The main purpose of this list is to name settings that do not have
    # a default value but can - if set - influence how a '[section]' behaves. Repeating a '[DEFAULT]' here does not
    # make sense. We use 'is_mandatory' to determine if we have to raise errors on missing settings. Here
    # 'is_mandatory' means the setting must be given in a '[section]'. It may be empty.
    CFG_KNOWN_SECTION = [
        # {"key": "an_option", "is_mandatory": True},
        # {"key": "another_one", "is_mandatory": False}
    ]
    # Options every '[section]' must define to be considered valid.
    CFG_MANDATORY = [section_cfg["key"] for section_cfg in CFG_KNOWN_SECTION if section_cfg["is_mandatory"]]
|
||||||
|
|
||||||
|
|
||||||
|
# Detect whether we were launched by systemd: it exports at least one of these
# variables for service processes. Under systemd the journal already records
# timestamp/level/origin, so RichHandler's own decorations are switched off to
# avoid duplicating them.
is_systemd = any(systemd_env_var in os.environ for systemd_env_var in ["SYSTEMD_EXEC_PID", "INVOCATION_ID"])
logging.basicConfig(
    # Default for all modules is NOTSET so log everything
    level="NOTSET",
    format=CONST.LOG_FORMAT,
    datefmt="[%X]",
    handlers=[RichHandler(
        show_time=not is_systemd,
        show_path=not is_systemd,
        show_level=not is_systemd,
        rich_tracebacks=True
    )]
)
log = logging.getLogger("rich")
# Our own code logs with this level; a LOGLEVEL env var (e.g. "DEBUG") overrides
# the INFO fallback. os.environ.get() replaces the former membership test over
# a list comprehension of os.environ.items().
log.setLevel(os.environ.get("LOGLEVEL", logging.INFO))
|
||||||
|
|
||||||
|
# Inflection engine used to build grammatically correct (singular/plural) log messages.
p = inflect.engine()
|
||||||
|
|
||||||
|
|
||||||
|
# Use this version of class ConfigParser to log.debug contents of our config file. When parsing sections other than
|
||||||
|
# 'default' we don't want to reprint defaults over and over again. This custom class achieves that.
|
||||||
|
class ConfigParser(configparser.ConfigParser):
    """ConfigParser variant whose options() can exclude [DEFAULT] entries.

    Adapted from https://stackoverflow.com/a/12600066.
    """

    def options(self, section, no_defaults=False, **kwargs):
        """Return option names of *section*; with no_defaults=True, omit inherited defaults."""
        if not no_defaults:
            return super().options(section)
        try:
            section_options = self._sections[section]
        except KeyError:
            raise configparser.NoSectionError(section)
        return list(section_options.keys())
|
||||||
|
|
||||||
|
|
||||||
|
# {option: value} pairs read from the ini file's [DEFAULT] section, filled in
# by validate_default_section() below.
ini_defaults = []
# Internal fallbacks and their metadata, derived from CONST.CFG_KNOWN_DEFAULTS.
# .get() with a False fallback is used for the metadata keys: per the
# CFG_KNOWN_DEFAULTS documentation both 'is_global' and 'empty_ok' may be
# omitted from an entry, so direct indexing would raise KeyError.
internal_defaults = {default["key"]: default["value"] for default in CONST.CFG_KNOWN_DEFAULTS}
internal_globals = [default["key"] for default in CONST.CFG_KNOWN_DEFAULTS if default.get("is_global", False)]
internal_empty_ok = [default["key"] for default in CONST.CFG_KNOWN_DEFAULTS if default.get("empty_ok", False)]
# 'list' converter: comma-separated string -> list of stripped items; an empty
# raw value yields [].
config = ConfigParser(defaults=internal_defaults,
                      converters={'list': lambda x: [i.strip() for i in x.split(',') if len(x) > 0]})
config.read(CONST.CFG_DEFAULT_ABS_PATH)
|
||||||
|
|
||||||
|
|
||||||
|
def print_section_header(
        header: str) -> str:
    """Build the log line announcing that config section *header* is being loaded."""
    return "Loading config section '[" + header + "]' ..."
|
||||||
|
|
||||||
|
|
||||||
|
def validate_default_section(
        config_obj: configparser.ConfigParser) -> None:
    """Log the [DEFAULT] section of *config_obj* and record its option/value pairs.

    Exits with code 1 when the config file defines no sections at all.
    Side effect: appends every default as a {option: value} dict to the
    module-level ini_defaults list so later sections can be compared to it.
    """
    log.debug(f"Loading config from file '{CONST.CFG_DEFAULT_ABS_PATH}' ...")
    if not config_obj.sections():
        log.debug(f"No config sections found in '{CONST.CFG_DEFAULT_ABS_PATH}'. Exiting 1 ...")
        sys.exit(1)
    # Fixed: this used to test the module-level 'config' object instead of the
    # 'config_obj' argument that was passed in.
    if config_obj.defaults():
        log.debug(f"Symbol legend:\n"
                  f"* Default from section '[{config_obj.default_section}]'\n"
                  f": Global option from '[{config_obj.default_section}]', can not be overridden in local sections\n"
                  f"~ Local option, doesn't exist in '[{config_obj.default_section}]'\n"
                  f"+ Local override of a value from '[{config_obj.default_section}]'\n"
                  f"= Local override, same value as in '[{config_obj.default_section}]'\n"
                  f"# Local attempt at overriding a global, will be ignored")
        log.debug(print_section_header(config_obj.default_section))
        for default in config_obj.defaults():
            # Remember the pair so later sections can detect overrides.
            ini_defaults.append({default: config_obj[config_obj.default_section][default]})
            if default in internal_globals:
                log.debug(f": {default} = {config_obj[config_obj.default_section][default]}")
            else:
                log.debug(f"* {default} = {config_obj[config_obj.default_section][default]}")
    else:
        log.debug("No defaults defined")
|
||||||
|
|
||||||
|
|
||||||
|
def config_has_valid_section(
        config_obj: configparser.ConfigParser) -> bool:
    """Return True if at least one section defines all CONST.CFG_MANDATORY options.

    Fixed: the annotation used to be configparser.ConfigParser(), which
    instantiated a throwaway parser at definition time; the class itself is
    the intended annotation.
    """
    mandatory_options = set(CONST.CFG_MANDATORY)
    # any() short-circuits on the first valid section, matching the original
    # flag-and-break loop.
    return any(
        mandatory_options.issubset(config_obj.options(section_name))
        for section_name in config_obj.sections()
    )
|
||||||
|
|
||||||
|
|
||||||
|
def is_default(
        config_key: str) -> bool:
    """True when *config_key* appears among the options recorded from [DEFAULT]."""
    for recorded_default in ini_defaults:
        if config_key in recorded_default:
            return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def is_global(
        config_key: str) -> bool:
    """True when *config_key* is marked 'is_global' in CONST.CFG_KNOWN_DEFAULTS."""
    return any(config_key == global_key for global_key in internal_globals)
|
||||||
|
|
||||||
|
|
||||||
|
def is_same_as_default(
        config_kv_pair: dict) -> bool:
    """True when *config_kv_pair* exactly matches a pair recorded from [DEFAULT]."""
    return any(config_kv_pair == recorded_default for recorded_default in ini_defaults)
|
||||||
|
|
||||||
|
|
||||||
|
def we_have_unset_options(
        config_obj: configparser.ConfigParser,
        section_name: str) -> list:
    """Return the options of *section_name* that are empty but must not be.

    Options listed in internal_empty_ok may legitimately be empty and are
    skipped; every other empty option is logged as a warning and collected.

    Fixed: the annotation used to be configparser.ConfigParser(), which
    instantiated a throwaway parser at definition time.
    """

    options_must_be_non_empty = []

    for option in config_obj.options(section_name):
        # get() returns the raw string; "" is falsy, i.e. the option is unset.
        if not config_obj.get(section_name, option):
            if option not in internal_empty_ok:
                log.warning(f"In section '[{section_name}]' option '{option}' is empty, it mustn't be.")
                options_must_be_non_empty.append(option)

    return options_must_be_non_empty
|
||||||
|
|
||||||
|
|
||||||
|
def validate_config_sections(
        config_obj: configparser.ConfigParser) -> None:
    """Validate every non-DEFAULT section of *config_obj* in place.

    - Exits 7 when a section contains options that are empty but must not be.
    - Removes sections missing any CONST.CFG_MANDATORY option.
    - Logs each local option with a prefix symbol ('~' purely local, '+'
      overrides a default, '=' override equal to the default, '#' ignored
      global override) and strips attempted overrides of globals.
    """
    for this_section in config_obj.sections():
        log.debug(print_section_header(this_section))

        unset_options = we_have_unset_options(config_obj, this_section)
        if unset_options:
            # Uses inflect so the message reads naturally for one or many options.
            log.error(f"""{p.plural("Option", len(unset_options))} {unset_options} """
                      f"""{p.plural("is", len(unset_options))} unset. """
                      f"""{p.singular_noun("They", len(unset_options))} """
                      f"must have a non-null value. "
                      f"""{p.plural("Default", len(unset_options))} {p.plural("is", len(unset_options))}:""")
            for unset_option in unset_options:
                # Show the internal default the user could fall back to.
                log.error(f"{unset_option} = {internal_defaults[unset_option]}")
            log.error(f"Exiting 7 ...")
            sys.exit(7)

        # no_defaults=True: judge only options written in the section itself.
        if not set(CONST.CFG_MANDATORY).issubset(config_obj.options(this_section, no_defaults=True)):
            log.warning(f"Config section '[{this_section}]' does not have all mandatory options "
                        f"{CONST.CFG_MANDATORY} set, skipping section ...")
            config_obj.remove_section(this_section)
        else:
            for key in config_obj.options(this_section, no_defaults=True):
                kv_prefix = "~"
                remove_from_section = False
                if is_global(key):
                    # Globals may not be overridden locally; mark and drop below.
                    kv_prefix = "#"
                    remove_from_section = True
                elif is_default(key):
                    kv_prefix = "+"
                    if is_same_as_default({key: config_obj[this_section][key]}):
                        kv_prefix = "="
                log.debug(f"{kv_prefix} {key} = {config_obj[this_section][key]}")
                if remove_from_section:
                    config_obj.remove_option(this_section, key)
|
||||||
|
|
||||||
|
|
||||||
|
def an_important_function(
        section_name: str,
        config_obj: configparser.ConfigParser,
        whatever: str) -> list:
    """Placeholder business logic for one config section.

    Reads the duration bounds from *section_name* (getint also validates that
    they parse as integers and raises otherwise); the values and *whatever*
    are otherwise unused in this template stub, which returns a dummy list.

    Fixed: the annotation used to be configparser.ConfigParser(), which
    instantiated a throwaway parser at definition time.
    """
    min_duration = config_obj.getint(section_name, "min_duration")
    max_duration = config_obj.getint(section_name, "max_duration")
    return ["I", "am", "a", "list"]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Validate [DEFAULT] first; exits 1 if the ini file has no sections at all.
    validate_default_section(config)
    if config_has_valid_section(config):
        # Drops invalid sections and ignored global overrides in place;
        # exits 7 on empty-but-mandatory options.
        validate_config_sections(config)
    else:
        log.error(f"No valid config section found. A valid config section has at least the mandatory options "
                  f"{CONST.CFG_MANDATORY} set. Exiting 2 ...")
        sys.exit(2)

    # Only validated sections survive to this point.
    log.debug(f"Iterating over config sections ...")
    for section in config.sections():
        log.info(f"Processing section '[{section}]' ...")
        # ...
|
17
python-naive/hooks/post_gen_project.py
Normal file
17
python-naive/hooks/post_gen_project.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import os

# Cookiecutter post-generation hook: runs inside the freshly generated
# project directory after templating has finished.
project_dir = os.getcwd()
examples_dir_name = "examples"
config_ini_file_name = "config.ini.example"
examples_dir_abs = os.path.join(project_dir, examples_dir_name)
config_ini_file_abs = os.path.join(project_dir, examples_dir_name, config_ini_file_name)

# Jinja resolves this condition at generation time: when the user answered
# "no" to use_config_ini, the example config file (and its directory, if then
# empty) is removed best-effort; OSError is deliberately swallowed.
if {% if cookiecutter.use_config_ini == "yes" -%}False{% else -%}True{%- endif -%}:
    try:
        os.remove(config_ini_file_abs)
        try:
            os.rmdir(examples_dir_abs)
        except OSError:
            pass
    except OSError:
        pass
|
@@ -0,0 +1,18 @@
|
|||||||
|
[DEFAULT]
|
||||||
|
self_name = {{ cookiecutter.__project_slug }}
|
||||||
|
tmp_base_dir = /tmp/%(self_name)s
|
||||||
|
state_base_dir = /var/lib/%(self_name)s
|
||||||
|
state_files_dir = %(state_base_dir)s/state
|
||||||
|
state_file_retention = 50
|
||||||
|
state_file_name_prefix = state-
|
||||||
|
state_file_name_suffix = .log
|
||||||
|
{{ cookiecutter.__project_slug_under }}_some_option = "http://localhost:8000/api/query"
|
||||||
|
another_option = "first"
|
||||||
|
|
||||||
|
[this-is-a-section]
|
||||||
|
min_duration = 1200
|
||||||
|
max_duration = 3000
|
||||||
|
title_not_regex = this|that|somethingelse
|
||||||
|
query = @http-payload.json
|
||||||
|
dl_dir = /tmp/some/dir
|
||||||
|
another_option = "overwriting_from_default"
|
@@ -0,0 +1,6 @@
|
|||||||
|
{%- if cookiecutter.use_rich_logging == "yes" -%}
|
||||||
|
rich
|
||||||
|
{% endif -%}
|
||||||
|
{%- if cookiecutter.use_inflect == "yes" -%}
|
||||||
|
inflect
|
||||||
|
{% endif -%}
|
@@ -0,0 +1,22 @@
|
|||||||
|
{%- if cookiecutter.use_rich_logging == "yes" or cookiecutter.use_inflect == "yes" -%}
|
||||||
|
#
|
||||||
|
# This file is autogenerated by pip-compile with python 3.10
|
||||||
|
# To update, run:
|
||||||
|
#
|
||||||
|
# pip-compile
|
||||||
|
#
|
||||||
|
{% endif -%}
|
||||||
|
{%- if cookiecutter.use_rich_logging == "yes" -%}
|
||||||
|
commonmark==0.9.1
|
||||||
|
# via rich
|
||||||
|
{% endif -%}
|
||||||
|
{%- if cookiecutter.use_inflect == "yes" -%}
|
||||||
|
inflect==5.6.0
|
||||||
|
# via -r requirements.in
|
||||||
|
{% endif -%}
|
||||||
|
{%- if cookiecutter.use_rich_logging == "yes" -%}
|
||||||
|
pygments==2.12.0
|
||||||
|
# via rich
|
||||||
|
rich==12.4.4
|
||||||
|
# via -r requirements.in
|
||||||
|
{% endif -%}
|
@@ -0,0 +1,267 @@
|
|||||||
|
{% if cookiecutter.use_config_ini == "yes" -%}
|
||||||
|
# Path and env manipulation
|
||||||
|
import os
|
||||||
|
# Use a config file
|
||||||
|
import configparser
|
||||||
|
# Exit with various exit codes
|
||||||
|
import sys
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_rich_logging == "yes" %}
|
||||||
|
# Manipulate style and content of logs
|
||||||
|
import logging
|
||||||
|
from rich.logging import RichHandler
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_inflect == "yes" %}
|
||||||
|
# Correctly generate plurals, singular nouns etc.
|
||||||
|
import inflect
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_rich_logging == "yes" or cookiecutter.use_config_ini == "yes" %}
|
||||||
|
|
||||||
|
|
||||||
|
# Exit codes
|
||||||
|
# 1: Config file invalid, it has no sections
|
||||||
|
# 2: Config file invalid, sections must define at least CONST.CFG_MANDATORY
|
||||||
|
# 7 : An option that must have a non-null value is either unset or null
|
||||||
|
|
||||||
|
|
||||||
|
class CONST(object):
|
||||||
|
__slots__ = ()
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_rich_logging == "yes" %}
|
||||||
|
LOG_FORMAT = "%(message)s"
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_config_ini == "yes" %}
|
||||||
|
# How to find a config file
|
||||||
|
CFG_THIS_FILE_DIRNAME = os.path.dirname(__file__)
|
||||||
|
CFG_DEFAULT_FILENAME = "config.ini"
|
||||||
|
CFG_DEFAULT_ABS_PATH = os.path.join(CFG_THIS_FILE_DIRNAME, CFG_DEFAULT_FILENAME)
|
||||||
|
# Values you don't have to set, these are their internal defaults. You may optionally add a key 'is_global' equal
|
||||||
|
# to either True or False. By default if left off it'll be assumed False. Script will treat values where
|
||||||
|
# 'is_global' equals True as not being overridable in a '[section]'. It's a setting that only makes sense in a
|
||||||
|
# global context for the entire script. An option where 'empty_ok' equals True can safely be unset or set to
|
||||||
|
# an empty string. An example config.ini file may give a sane config example value here, removing that value
|
||||||
|
# still results in a valid file.
|
||||||
|
CFG_KNOWN_DEFAULTS = [
|
||||||
|
{"key": "self_name", "value": "{{ cookiecutter.__project_slug }}", "empty_ok": False},
|
||||||
|
{"key": "tmp_base_dir", "value": os.path.join(CFG_THIS_FILE_DIRNAME, "data/tmp/%(self_name)s"),
|
||||||
|
"empty_ok": False},
|
||||||
|
{"key": "state_base_dir", "value": os.path.join(CFG_THIS_FILE_DIRNAME, "data/var/lib/%(self_name)s"),
|
||||||
|
"empty_ok": False},
|
||||||
|
{"key": "state_files_dir", "value": "%(state_base_dir)s/state", "is_global": False, "empty_ok": False},
|
||||||
|
{"key": "state_file_retention", "value": "50", "is_global": False, "empty_ok": True},
|
||||||
|
{"key": "state_file_name_prefix", "value": "state-", "is_global": False, "empty_ok": True},
|
||||||
|
{"key": "state_file_name_suffix", "value": ".log", "is_global": False, "empty_ok": True},
|
||||||
|
{"key": "{{ cookiecutter.__project_slug_under }}_some_option", "value": "http://localhost:8000/api/query", "is_global": True,
|
||||||
|
"empty_ok": False},
|
||||||
|
{"key": "another_option", "value": "first", "is_global": True, "empty_ok": True}
|
||||||
|
]
|
||||||
|
# In all sections other than 'default' the following settings are known and accepted. We ignore other settings.
|
||||||
|
# Per CFG_KNOWN_DEFAULTS above most '[DEFAULT]' options are accepted by virtue of being defaults and overridable.
|
||||||
|
# The only exception are options where "is_global" equals True, they can't be overridden in '[sections]'; any
|
||||||
|
# attempt at doing it anyway will be ignored. The main purpose of this list is to name settings that do not have
|
||||||
|
# a default value but can - if set - influence how a '[section]' behaves. Repeating a '[DEFAULT]' here does not
|
||||||
|
# make sense. We use 'is_mandatory' to determine if we have to raise errors on missing settings. Here
|
||||||
|
# 'is_mandatory' means the setting must be given in a '[section]'. It may be empty.
|
||||||
|
CFG_KNOWN_SECTION = [
|
||||||
|
# {"key": "an_option", "is_mandatory": True},
|
||||||
|
# {"key": "another_one", "is_mandatory": False}
|
||||||
|
]
|
||||||
|
CFG_MANDATORY = [section_cfg["key"] for section_cfg in CFG_KNOWN_SECTION if section_cfg["is_mandatory"]]
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_rich_logging == "yes" %}
|
||||||
|
|
||||||
|
|
||||||
|
is_systemd = any([systemd_env_var in os.environ for systemd_env_var in ["SYSTEMD_EXEC_PID", "INVOCATION_ID"]])
|
||||||
|
logging.basicConfig(
|
||||||
|
# Default for all modules is NOTSET so log everything
|
||||||
|
level="NOTSET",
|
||||||
|
format=CONST.LOG_FORMAT,
|
||||||
|
datefmt="[%X]",
|
||||||
|
handlers=[RichHandler(
|
||||||
|
show_time=False if is_systemd else True,
|
||||||
|
show_path=False if is_systemd else True,
|
||||||
|
show_level=False if is_systemd else True,
|
||||||
|
rich_tracebacks=True
|
||||||
|
)]
|
||||||
|
)
|
||||||
|
log = logging.getLogger("rich")
|
||||||
|
# Our own code logs with this level
|
||||||
|
log.setLevel(os.environ.get("LOGLEVEL") if "LOGLEVEL" in [k for k, v in os.environ.items()] else logging.INFO)
|
||||||
|
{%- endif %}{%- if cookiecutter.use_rich_logging == "no" %}
|
||||||
|
{% endif %}
|
||||||
|
{%- if cookiecutter.use_inflect == "yes" %}
|
||||||
|
|
||||||
|
p = inflect.engine()
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_config_ini == "yes" %}
|
||||||
|
|
||||||
|
|
||||||
|
# Use this version of class ConfigParser to {% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %} contents of our config file. When parsing sections other than
|
||||||
|
# 'default' we don't want to reprint defaults over and over again. This custom class achieves that.
|
||||||
|
class ConfigParser(
|
||||||
|
configparser.ConfigParser):
|
||||||
|
"""Can get options() without defaults
|
||||||
|
|
||||||
|
Taken from https://stackoverflow.com/a/12600066.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def options(self, section, no_defaults=False, **kwargs):
|
||||||
|
if no_defaults:
|
||||||
|
try:
|
||||||
|
return list(self._sections[section].keys())
|
||||||
|
except KeyError:
|
||||||
|
raise configparser.NoSectionError(section)
|
||||||
|
else:
|
||||||
|
return super().options(section)
|
||||||
|
|
||||||
|
|
||||||
|
ini_defaults = []
|
||||||
|
internal_defaults = {default["key"]: default["value"] for default in CONST.CFG_KNOWN_DEFAULTS}
|
||||||
|
internal_globals = [default["key"] for default in CONST.CFG_KNOWN_DEFAULTS if default["is_global"]]
|
||||||
|
internal_empty_ok = [default["key"] for default in CONST.CFG_KNOWN_DEFAULTS if default["empty_ok"]]
|
||||||
|
config = ConfigParser(defaults=internal_defaults,
|
||||||
|
converters={'list': lambda x: [i.strip() for i in x.split(',') if len(x) > 0]})
|
||||||
|
config.read(CONST.CFG_DEFAULT_ABS_PATH)
|
||||||
|
|
||||||
|
|
||||||
|
def print_section_header(
|
||||||
|
header: str) -> str:
|
||||||
|
return f"Loading config section '[{header}]' ..."
|
||||||
|
|
||||||
|
|
||||||
|
def validate_default_section(
|
||||||
|
config_obj: configparser.ConfigParser()) -> None:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f"Loading config from file '{CONST.CFG_DEFAULT_ABS_PATH}' ...")
|
||||||
|
if not config_obj.sections():
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f"No config sections found in '{CONST.CFG_DEFAULT_ABS_PATH}'. Exiting 1 ...")
|
||||||
|
sys.exit(1)
|
||||||
|
if config.defaults():
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f"Symbol legend:\n"
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"* Default from section '[{config_obj.default_section}]'\n"
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f": Global option from '[{config_obj.default_section}]', can not be overridden in local sections\n"
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"~ Local option, doesn't exist in '[{config_obj.default_section}]'\n"
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"+ Local override of a value from '[{config_obj.default_section}]'\n"
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"= Local override, same value as in '[{config_obj.default_section}]'\n"
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"# Local attempt at overriding a global, will be ignored")
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(print_section_header(config_obj.default_section))
|
||||||
|
for default in config_obj.defaults():
|
||||||
|
ini_defaults.append({default: config_obj[config_obj.default_section][default]})
|
||||||
|
if default in internal_globals:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f": {default} = {config_obj[config_obj.default_section][default]}")
|
||||||
|
else:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f"* {default} = {config_obj[config_obj.default_section][default]}")
|
||||||
|
else:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f"No defaults defined")
|
||||||
|
|
||||||
|
|
||||||
|
def config_has_valid_section(
|
||||||
|
config_obj: configparser.ConfigParser()) -> bool:
|
||||||
|
has_valid_section = False
|
||||||
|
for config_obj_section in config_obj.sections():
|
||||||
|
if set(CONST.CFG_MANDATORY).issubset(config_obj.options(config_obj_section)):
|
||||||
|
has_valid_section = True
|
||||||
|
break
|
||||||
|
return has_valid_section
|
||||||
|
|
||||||
|
|
||||||
|
def is_default(
|
||||||
|
config_key: str) -> bool:
|
||||||
|
return any(config_key in ini_default for ini_default in ini_defaults)
|
||||||
|
|
||||||
|
|
||||||
|
def is_global(
|
||||||
|
config_key: str) -> bool:
|
||||||
|
return config_key in internal_globals
|
||||||
|
|
||||||
|
|
||||||
|
def is_same_as_default(
|
||||||
|
config_kv_pair: dict) -> bool:
|
||||||
|
return config_kv_pair in ini_defaults
|
||||||
|
|
||||||
|
|
||||||
|
def we_have_unset_options(
|
||||||
|
config_obj: configparser.ConfigParser(),
|
||||||
|
section_name: str) -> list:
|
||||||
|
|
||||||
|
options_must_be_non_empty = []
|
||||||
|
|
||||||
|
for option in config_obj.options(section_name):
|
||||||
|
if not config_obj.get(section_name, option):
|
||||||
|
if option not in internal_empty_ok:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.warning{%- else -%}print{%- endif %}(f"In section '[{section_name}]' option '{option}' is empty, it mustn't be.")
|
||||||
|
options_must_be_non_empty.append(option)
|
||||||
|
|
||||||
|
return options_must_be_non_empty
|
||||||
|
|
||||||
|
|
||||||
|
def validate_config_sections(
|
||||||
|
config_obj: configparser.ConfigParser()) -> None:
|
||||||
|
for this_section in config_obj.sections():
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(print_section_header(this_section))
|
||||||
|
|
||||||
|
unset_options = we_have_unset_options(config_obj, this_section)
|
||||||
|
if unset_options:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.error{%- else -%}print{%- endif %}(f"""{% if cookiecutter.use_inflect == "yes" %}{p.plural("Option", len(unset_options))}{% else %}Options{% endif %} {unset_options} """
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"""{% if cookiecutter.use_inflect == "yes" %}{p.plural("is", len(unset_options))}{% else %}are{% endif %} unset. """
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"""{% if cookiecutter.use_inflect == "yes" %}{p.singular_noun("They", len(unset_options))}{% else %}They{% endif %} """
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"must have a non-null value. "
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"""{% if cookiecutter.use_inflect == "yes" %}{p.plural("Default", len(unset_options))} {p.plural("is", len(unset_options))}{% else %}Defaults are{% endif %}:""")
|
||||||
|
for unset_option in unset_options:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.error{%- else -%}print{%- endif %}(f"{unset_option} = {internal_defaults[unset_option]}")
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.error{%- else -%}print{%- endif %}(f"Exiting 7 ...")
|
||||||
|
sys.exit(7)
|
||||||
|
|
||||||
|
if not set(CONST.CFG_MANDATORY).issubset(config_obj.options(this_section, no_defaults=True)):
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.warning{%- else -%}print{%- endif %}(f"Config section '[{this_section}]' does not have all mandatory options "
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"{CONST.CFG_MANDATORY} set, skipping section ...")
|
||||||
|
config_obj.remove_section(this_section)
|
||||||
|
else:
|
||||||
|
for key in config_obj.options(this_section, no_defaults=True):
|
||||||
|
kv_prefix = "~"
|
||||||
|
remove_from_section = False
|
||||||
|
if is_global(key):
|
||||||
|
kv_prefix = "#"
|
||||||
|
remove_from_section = True
|
||||||
|
elif is_default(key):
|
||||||
|
kv_prefix = "+"
|
||||||
|
if is_same_as_default({key: config_obj[this_section][key]}):
|
||||||
|
kv_prefix = "="
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f"{kv_prefix} {key} = {config_obj[this_section][key]}")
|
||||||
|
if remove_from_section:
|
||||||
|
config_obj.remove_option(this_section, key)
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
|
||||||
|
def an_important_function(
|
||||||
|
section_name: str,
|
||||||
|
{%- if cookiecutter.use_config_ini == "yes" %}
|
||||||
|
config_obj: configparser.ConfigParser(),
|
||||||
|
{%- endif %}
|
||||||
|
whatever: str) -> list:
|
||||||
|
{%- if cookiecutter.use_config_ini == "yes" %}
|
||||||
|
min_duration = config_obj.getint(section_name, "min_duration")
|
||||||
|
max_duration = config_obj.getint(section_name, "max_duration")
|
||||||
|
{%- else %}
|
||||||
|
min_duration = 10
|
||||||
|
max_duration = 20
|
||||||
|
{%- endif %}
|
||||||
|
return ["I", "am", "a", "list"]
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
{% if cookiecutter.use_config_ini == "yes" -%}
|
||||||
|
validate_default_section(config)
|
||||||
|
if config_has_valid_section(config):
|
||||||
|
validate_config_sections(config)
|
||||||
|
else:
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.error{%- else -%}print{%- endif %}(f"No valid config section found. A valid config section has at least the mandatory options "
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" %} {% endif %}f"{CONST.CFG_MANDATORY} set. Exiting 2 ...")
|
||||||
|
sys.exit(2)
|
||||||
|
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.debug{%- else -%}print{%- endif %}(f"Iterating over config sections ...")
|
||||||
|
for section in config.sections():
|
||||||
|
{% if cookiecutter.use_rich_logging == "yes" -%}log.info{%- else -%}print{%- endif %}(f"Processing section '[{section}]' ...")
|
||||||
|
# ...
|
||||||
|
{%- else -%}
|
||||||
|
pass
|
||||||
|
{%- endif %}
|
Reference in New Issue
Block a user