Compare commits
115 Commits
Author | SHA1 | Date |
---|---|---|
einarf | e6ca4aa9ca | |
einarf | 093dab93ca | |
einarf | 405bd4af15 | |
dreadper | 28dda6b09d | |
dreadper | b400138b73 | |
Einar Forselv | b52655a23b | |
Maximilian Wehrstedt | 323e299b7e | |
einarf | 5c33ccf0b1 | |
einarf | 764aac6744 | |
einarf | bbe57dfd69 | |
einarf | 4517880846 | |
einarf | 1fefd63c72 | |
Einar Forselv | 4e1af219e2 | |
Einar Forselv | 93f080d5b3 | |
Jannik | 0fc620bb1f | |
Jannik | f7958d7db9 | |
Jannik | 18ddb173ac | |
Jannik | f59a046bbc | |
Einar Forselv | 8b934dc12f | |
Einar Forselv | 8e9105fed5 | |
Einar Forselv | d7492e51f6 | |
Einar Forselv | 07a19f7f42 | |
Einar Forselv | 13d8e07a33 | |
Einar Forselv | 25b39b9908 | |
Einar Forselv | d0fdf2d1d3 | |
Einar Forselv | 3aa0704045 | |
Einar Forselv | cf668e2153 | |
Einar Forselv | d4c77cf43d | |
Einar Forselv | cecc647a10 | |
Einar Forselv | 61ec487e24 | |
Einar Forselv | 0bab85f5cf | |
Einar Forselv | 1a100d73ab | |
Einar Forselv | 270137d931 | |
Einar Forselv | e4263822bf | |
Einar Forselv | 311bedb5ab | |
Einar Forselv | 88cf894689 | |
Einar Forselv | 6817f0999f | |
Einar Forselv | 74c0954e6f | |
Einar Forselv | f6995eb506 | |
Einar Forselv | ef28baed5e | |
Einar Forselv | 336cace237 | |
Einar Forselv | cab4676b91 | |
Einar Forselv | d002ad9390 | |
Einar Forselv | 98a10bf994 | |
Einar Forselv | 2535ce3421 | |
Einar Forselv | 8858f88ba4 | |
Einar Forselv | c5b7f11db7 | |
Einar Forselv | a099060b2e | |
Einar Forselv | dd40152fe1 | |
Einar Forselv | f77d61410d | |
Einar Forselv | 272244fecf | |
Einar Forselv | 7b2ffd8e7c | |
Einar Forselv | be173ff5cb | |
Einar Forselv | df92579e10 | |
Einar Forselv | fb3c564d7a | |
Einar Forselv | 151f3cfeeb | |
Einar Forselv | 2848738789 | |
Einar Forselv | cf402d77ed | |
Einar Forselv | ae835f30d3 | |
Einar Forselv | 1e21ff422f | |
Einar Forselv | 6347529701 | |
Einar Forselv | 3dacc0bfab | |
Einar Forselv | fa14880742 | |
Einar Forselv | 5eb773eb34 | |
Einar Forselv | e8123922df | |
Einar Forselv | be74715595 | |
Einar Forselv | 515702ae78 | |
Einar Forselv | ff49d9c018 | |
Einar Forselv | 187787425a | |
Einar Forselv | fa1c982bf5 | |
Einar Forselv | 2bbd329047 | |
Einar Forselv | 8097ac79af | |
Einar Forselv | 5082244949 | |
Einar Forselv | d9e5a62458 | |
Einar Forselv | 6085f5fc03 | |
Einar Forselv | d89ed781ef | |
Einar Forselv | e2dec9ffa0 | |
Einar Forselv | 2b3a702f21 | |
Einar Forselv | 3456e1a899 | |
Einar Forselv | 105cdbb65e | |
Einar Forselv | d671ffb626 | |
Einar Forselv | f988b42881 | |
Einar Forselv | 5d653c2c3c | |
Einar Forselv | c80b2774d4 | |
Einar Forselv | 12da998538 | |
Einar Forselv | d17d776339 | |
Einar Forselv | 758c3075f1 | |
Einar Forselv | 9cad6a5c71 | |
Einar Forselv | 4ebe16af14 | |
Einar Forselv | fd87ddc388 | |
Einar Forselv | 2cbc5aa6fa | |
Einar Forselv | ffa2dfc119 | |
Einar Forselv | cfc92b2284 | |
Einar Forselv | 216202dec7 | |
Einar Forselv | fab988a05e | |
Einar Forselv | 164834d3a9 | |
Einar Forselv | a0dfb04aa7 | |
Einar Forselv | 7f588c57ab | |
Einar Forselv | e01f7c6cff | |
Einar Forselv | 102073cb70 | |
Einar Forselv | e060c28c93 | |
Einar Forselv | 14903f3bbd | |
Einar Forselv | 96bd419a24 | |
Einar Forselv | 75ab549370 | |
Einar Forselv | 6f06d25db5 | |
Einar Forselv | 0a9e5edfe4 | |
Einar Forselv | 130be30268 | |
Einar Forselv | 0af9f2e8ee | |
Einar Forselv | c59f022a55 | |
Einar Forselv | 98fe448348 | |
Einar Forselv | 3708bb9100 | |
Einar Forselv | d7039cccf4 | |
Einar Forselv | 864c026402 | |
Einar Forselv | fcd18ba1cb | |
Einar Forselv | 915695043c |
|
@ -1,19 +0,0 @@
|
|||
.venv/
|
||||
.vscode/
|
||||
extras/
|
||||
restic_cache/
|
||||
restic_data/
|
||||
tests/
|
||||
.gitignore
|
||||
*.env
|
||||
*.log
|
||||
docker-compose.yaml
|
||||
*.ini
|
||||
*.egg-info
|
||||
__pycache__
|
||||
.DS_Store
|
||||
.git
|
||||
.pytest_cache
|
||||
.dockerignore
|
||||
build/
|
||||
docs/
|
Binary file not shown.
After Width: | Height: | Size: 6.1 KiB |
|
@ -21,6 +21,10 @@ restic_data/
|
|||
restic_cache/
|
||||
alerts.env
|
||||
|
||||
# docs
|
||||
# build
|
||||
build/
|
||||
docs/_build
|
||||
dist
|
||||
|
||||
# tests
|
||||
.tox
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
# Read the Docs configuration file for Sphinx projects
|
||||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
|
||||
# Required
|
||||
version: 2
|
||||
|
||||
# Set the OS, Python version and other tools you might need
|
||||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.10"
|
||||
|
||||
# Build documentation in the "docs/" directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
|
||||
# Optionally build your docs in additional formats such as PDF and ePub
|
||||
# formats:
|
||||
# - pdf
|
||||
# - epub
|
||||
|
||||
python:
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
|
@ -0,0 +1,17 @@
|
|||
language: python
|
||||
|
||||
sudo: false
|
||||
|
||||
matrix:
|
||||
include:
|
||||
python: 3.7
|
||||
dist: bionic
|
||||
sudo: true
|
||||
|
||||
install:
|
||||
- pip install -U setuptools pip wheel
|
||||
- pip install -r src/tests/requirements.txt
|
||||
- pip install ./src
|
||||
|
||||
script:
|
||||
- tox
|
11
Dockerfile
11
Dockerfile
|
@ -1,11 +0,0 @@
|
|||
FROM restic/restic:0.9.6
|
||||
|
||||
RUN apk update && apk add python3 dcron mariadb-client postgresql-client
|
||||
|
||||
ADD . /restic-compose-backup
|
||||
WORKDIR /restic-compose-backup
|
||||
RUN pip3 install -U pip setuptools && pip3 install -e .
|
||||
ENV XDG_CACHE_HOME=/cache
|
||||
|
||||
ENTRYPOINT []
|
||||
CMD ["./entrypoint.sh"]
|
237
README.md
237
README.md
|
@ -3,7 +3,8 @@
|
|||
|
||||
![docs](https://readthedocs.org/projects/restic-compose-backup/badge/?version=latest)
|
||||
|
||||
Backup using https://restic.net/ for a docker-compose setup.
|
||||
Backup using [restic] for a docker-compose setup.
|
||||
Currently tested with docker-ce 17, 18 and 19.
|
||||
|
||||
* [restic-compose-backup Documentation](https://restic-compose-backup.readthedocs.io)
|
||||
* [restic-compose-backup on Github](https://github.com/ZettaIO/restic-compose-backup)
|
||||
|
@ -11,189 +12,189 @@ Backup using https://restic.net/ for a docker-compose setup.
|
|||
|
||||
Features:
|
||||
|
||||
* Back up docker volumes or host binds
|
||||
* Back up mariadb postgres
|
||||
* Back up mariadb databases
|
||||
* Back up mysql databases
|
||||
* Notifications over mail/smtp
|
||||
* Notifications to Discord through webhooks
|
||||
* Backs up docker volumes or host binds
|
||||
* Backs up postgres, mariadb and mysql databases
|
||||
* Notifications over mail/smtp or Discord webhooks
|
||||
|
||||
Please report issus on [github](https://github.com/ZettaIO/restic-compose-backup/issues).
|
||||
|
||||
Automatically detects and backs up volumes, mysql, mariadb and postgres databases in a docker-compose setup.
|
||||
|
||||
* Each service in the compose setup is configured with a label
|
||||
to enable backup of volumes or databases
|
||||
* When backup starts a new instance of the container is created
|
||||
mapping in all the needed volumes. It will copy networks etc
|
||||
to ensure databases can be reached
|
||||
* Volumes are mounted to `/volumes/<service_name>/<path>`
|
||||
in the backup process container. `/volumes` is pushed into restic
|
||||
* Databases are backed up from stdin / dumps into restic using path `/databases/<service_name>/dump.sql`
|
||||
* Cron triggers backup at 2AM every day
|
||||
|
||||
## Install
|
||||
|
||||
```bash
|
||||
docker pull zettaio/restic-compose-backup
|
||||
```
|
||||
|
||||
.. or clone this repo and build it.
|
||||
## Configuration (env vars)
|
||||
|
||||
## Configuration
|
||||
|
||||
Required env variables for restic:
|
||||
Minimum configuration
|
||||
|
||||
```bash
|
||||
RESTIC_REPOSITORY
|
||||
RESTIC_PASSWORD
|
||||
```
|
||||
|
||||
Backend specific env vars : https://restic.readthedocs.io/en/stable/040_backup.html#environment-variables
|
||||
More config options can be found in the [documentation].
|
||||
|
||||
Additional env vars:
|
||||
Restic backend specific env vars : https://restic.readthedocs.io/en/stable/040_backup.html#environment-variables
|
||||
|
||||
## Compose Example
|
||||
|
||||
We simply control what should be backed up by adding
|
||||
labels to our containers. More details are covered
|
||||
in the [documentation].
|
||||
|
||||
restic-backup.env
|
||||
|
||||
```bash
|
||||
# Prune rules
|
||||
RESTIC_REPOSITORY=<whatever backend restic supports>
|
||||
RESTIC_PASSWORD=hopefullyasecturepw
|
||||
# snapshot prune rules
|
||||
RESTIC_KEEP_DAILY=7
|
||||
RESTIC_KEEP_WEEKLY=4
|
||||
RESTIC_KEEP_MONTHLY=12
|
||||
RESTIC_KEEP_YEARLY=3
|
||||
|
||||
# Logging level (debug,info,warning,error)
|
||||
LOG_LEVEL=info
|
||||
|
||||
# SMTP alerts
|
||||
EMAIL_HOST=my.mail.host
|
||||
EMAIL_PORT=465
|
||||
EMAIL_HOST_USER=johndoe
|
||||
EMAIL_HOST_PASSWORD=s3cr3tpassw0rd
|
||||
EMAIL_SEND_TO=johndoe@gmail.com
|
||||
|
||||
# Discord webhook
|
||||
DISCORD_WEBHOOK=https://discordapp.com/api/webhooks/...
|
||||
# Cron schedule. Run every day at 1am
|
||||
CRON_SCHEDULE="0 1 * * *"
|
||||
```
|
||||
|
||||
### Volumes
|
||||
docker-compose.yaml
|
||||
|
||||
```yaml
|
||||
version: '3'
|
||||
services:
|
||||
# The backup service
|
||||
backup:
|
||||
build: restic-compose-backup
|
||||
environment:
|
||||
- RESTIC_REPOSITORY=<whatever restic supports>
|
||||
- RESTIC_PASSWORD=hopefullyasecturepw
|
||||
- RESTIC_KEEP_DAILY=7
|
||||
- RESTIC_KEEP_WEEKLY=4
|
||||
- RESTIC_KEEP_MONTHLY=12
|
||||
- RESTIC_KEEP_YEARLY=3
|
||||
image: zettaio/restic-compose-backup:<version>
|
||||
env_file:
|
||||
- some_other_vars.env
|
||||
- restic-backup.env
|
||||
volumes:
|
||||
# We need to communicate with docker
|
||||
- /var/run/docker.sock:/tmp/docker.sock:ro
|
||||
|
||||
example:
|
||||
# Persistent storage of restic cache (greatly speeds up all restic operations)
|
||||
- cache:/cache
|
||||
web:
|
||||
image: some_image
|
||||
# Enable volume backup with label
|
||||
labels:
|
||||
# Enables backup of the volumes below
|
||||
restic-compose-backup.volumes: true
|
||||
# These volumes will be backed up
|
||||
volumes:
|
||||
# Docker volume
|
||||
- media:/srv/media
|
||||
# Host map
|
||||
- /srv/files:/srv/files
|
||||
|
||||
volumes:
|
||||
media:
|
||||
```
|
||||
|
||||
A simple `include` and `exclude` filter is also available.
|
||||
|
||||
```yaml
|
||||
example:
|
||||
image: some_image
|
||||
labels:
|
||||
restic-compose-backup.volumes: true
|
||||
restic-compose-backup.volumes.include: "files,data"
|
||||
volumes:
|
||||
# Source don't match include filter. No backup.
|
||||
- media:/srv/media
|
||||
# Matches include filter
|
||||
- files:/srv/files
|
||||
- /srv/data:/srv/data
|
||||
|
||||
volumes:
|
||||
media:
|
||||
files:
|
||||
|
||||
```
|
||||
|
||||
Exclude
|
||||
|
||||
```yaml
|
||||
example:
|
||||
image: some_image
|
||||
labels:
|
||||
restic-compose-backup.volumes: true
|
||||
restic-compose-backup.volumes.exclude: "media"
|
||||
volumes:
|
||||
# Excluded by filter
|
||||
- media:/srv/media
|
||||
# Backed up
|
||||
- files:/srv/files
|
||||
- /srv/data:/srv/data
|
||||
|
||||
volumes:
|
||||
media:
|
||||
files:
|
||||
```
|
||||
|
||||
### Databases
|
||||
|
||||
Will dump databases directly into restic through stdin.
|
||||
They will appear in restic as a separate snapshot with
|
||||
path `/databases/<service_name>/dump.sql` or similar.
|
||||
|
||||
```yaml
|
||||
mariadb:
|
||||
image: mariadb:10
|
||||
labels:
|
||||
# Enables backup of this database
|
||||
restic-compose-backup.mariadb: true
|
||||
```
|
||||
|
||||
```yaml
|
||||
env_file:
|
||||
mariadb-credentials.env
|
||||
volumes:
|
||||
- mysqldata:/var/lib/mysql
|
||||
mysql:
|
||||
image: mysql:5
|
||||
labels:
|
||||
# Enables backup of this database
|
||||
restic-compose-backup.mysql: true
|
||||
```
|
||||
env_file:
|
||||
mysql-credentials.env
|
||||
volumes:
|
||||
- mysqldata:/var/lib/mysql
|
||||
|
||||
```yaml
|
||||
postgres:
|
||||
image: postgres
|
||||
labels:
|
||||
# Enables backup of this database
|
||||
restic-compose-backup.postgres: true
|
||||
env_file:
|
||||
postgres-credentials.env
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
|
||||
volumes:
|
||||
media:
|
||||
mysqldata:
|
||||
mariadbdata:
|
||||
pgdata:
|
||||
cache:
|
||||
```
|
||||
|
||||
## The `rcb` command
|
||||
|
||||
Everything is controlled using the `rcb` command.
|
||||
After configuring backup with labels and restarted
|
||||
the affected services we can quickly view the
|
||||
result using the `status` subcommand.
|
||||
|
||||
```bash
|
||||
$ docker-compose run --rm backup rcb status
|
||||
INFO: Status for compose project 'myproject'
|
||||
INFO: Repository: '<restic repository>'
|
||||
INFO: Backup currently running?: False
|
||||
INFO: --------------- Detected Config ---------------
|
||||
INFO: service: mysql
|
||||
INFO: - mysql (is_ready=True)
|
||||
INFO: service: mariadb
|
||||
INFO: - mariadb (is_ready=True)
|
||||
INFO: service: postgres
|
||||
INFO: - postgres (is_ready=True)
|
||||
INFO: service: web
|
||||
INFO: - volume: media
|
||||
INFO: - volume: /srv/files
|
||||
```
|
||||
|
||||
The `status` subcommand lists what will be backed up and
|
||||
even pings the database services checking their availability.
|
||||
The `restic` command can also be used directly in the container.
|
||||
|
||||
More `rcb` commands can be found in the [documentation].
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
python setup.py develop
|
||||
pip install -r tests/requirements.txt
|
||||
pytest tests
|
||||
pip install -e ./src/
|
||||
pip install -r src/tests/requirements.txt
|
||||
tox
|
||||
```
|
||||
|
||||
## Building Docs
|
||||
|
||||
```bash
|
||||
pip install -r docs/requirements.txt
|
||||
python setup.py build_sphinx
|
||||
python src/setup.py build_sphinx
|
||||
```
|
||||
|
||||
# Local dev setup
|
||||
|
||||
The git repository contains a simple local setup for development
|
||||
|
||||
```bash
|
||||
# Create an overlay network to link the compose project and stack
|
||||
docker network create --driver overlay --attachable global
|
||||
# Start the compose project
|
||||
docker-compose up -d
|
||||
# Deploy the stack
|
||||
docker stack deploy -c swarm-stack.yml test
|
||||
```
|
||||
|
||||
In dev we should ideally start the backup container manually
|
||||
|
||||
```bash
|
||||
docker-compose run --rm backup sh
|
||||
# pip install the package in the container in editable mode to auto sync changes from host source
|
||||
pip3 install -e .
|
||||
```
|
||||
|
||||
Remember to enable swarm mode with `docker swarm init/join` and disable swarm
|
||||
mode with `docker swarm leave --force` when needed in development (single node setup).
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome regardless of experience level. Don't hesitate submitting issues, opening partial or completed pull requests.
|
||||
Contributions are welcome regardless of experience level.
|
||||
Don't hesitate submitting issues, opening partial or completed pull requests.
|
||||
|
||||
[restic]: https://restic.net/
|
||||
[documentation]: https://restic-compose-backup.readthedocs.io
|
||||
|
||||
---
|
||||
This project is sponsored by [zetta.io](https://www.zetta.io)
|
||||
|
||||
[![Zetta.IO](https://raw.githubusercontent.com/ZettaIO/restic-compose-backup/master/.github/logo.png)](https://www.zetta.io)
|
||||
|
|
|
@ -1,30 +1,38 @@
|
|||
version: '3'
|
||||
version: '3.7'
|
||||
services:
|
||||
backup:
|
||||
build: .
|
||||
build: ./src
|
||||
env_file:
|
||||
- restic_compose_backup.env
|
||||
# - alerts.env
|
||||
labels:
|
||||
restic-compose-backup.volumes: true
|
||||
restic-compose-backup.volumes.include: 'src'
|
||||
networks:
|
||||
- default
|
||||
- global
|
||||
volumes:
|
||||
# Map in docker socket
|
||||
- /var/run/docker.sock:/tmp/docker.sock:ro
|
||||
# Map backup database locally
|
||||
# Map local restic repository for dev
|
||||
- ./restic_data:/restic_data
|
||||
# Map restic cache
|
||||
- ./restic_cache:/cache
|
||||
# Map in project source in dev
|
||||
- .:/restic-compose-backup
|
||||
- ./src:/restic-compose-backup
|
||||
web:
|
||||
image: nginx
|
||||
labels:
|
||||
restic-compose-backup.volumes: true
|
||||
restic-compose-backup.volumes.include: "/tests"
|
||||
volumes:
|
||||
- ./tests:/srv/tests
|
||||
- ./src/tests:/srv/tests
|
||||
- ./.vscode:/srv/code
|
||||
environment:
|
||||
- SOME_VALUE=test
|
||||
- ANOTHER_VALUE=1
|
||||
|
||||
mysql:
|
||||
mysql5:
|
||||
image: mysql:5
|
||||
labels:
|
||||
restic-compose-backup.mysql: true
|
||||
|
@ -34,7 +42,19 @@ services:
|
|||
- MYSQL_USER=myuser
|
||||
- MYSQL_PASSWORD=mypassword
|
||||
volumes:
|
||||
- mysqldata:/var/lib/mysql
|
||||
- mysqldata5:/var/lib/mysql
|
||||
|
||||
mysql8:
|
||||
image: mysql:8
|
||||
labels:
|
||||
restic-compose-backup.mysql: true
|
||||
environment:
|
||||
- MYSQL_ROOT_PASSWORD=my-secret-pw
|
||||
- MYSQL_DATABASE=mydb
|
||||
- MYSQL_USER=myuser
|
||||
- MYSQL_PASSWORD=mypassword
|
||||
volumes:
|
||||
- mysqldata8:/var/lib/mysql
|
||||
|
||||
mariadb:
|
||||
image: mariadb:10
|
||||
|
@ -60,6 +80,11 @@ services:
|
|||
- pgdata:/var/lib/postgresql/data
|
||||
|
||||
volumes:
|
||||
mysqldata:
|
||||
mysqldata5:
|
||||
mysqldata8:
|
||||
mariadbdata:
|
||||
pgdata:
|
||||
|
||||
networks:
|
||||
global:
|
||||
external: true
|
||||
|
|
|
@ -22,8 +22,7 @@ copyright = '2019, Zetta.IO Technology AS'
|
|||
author = 'Zetta.IO Technology AS'
|
||||
|
||||
# The full version, including alpha/beta/rc tags
|
||||
release = '0.2.0'
|
||||
|
||||
release = '0.6.0'
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
||||
|
|
|
@ -0,0 +1,30 @@
|
|||
Advanced
|
||||
--------
|
||||
|
||||
Currently work in progress. These are only notes :D
|
||||
|
||||
Temp Notes
|
||||
~~~~~~~~~~
|
||||
|
||||
* Quick setup guide from start to end
|
||||
* we group snapshots by path when forgetting
|
||||
* explain rcb commands
|
||||
* examples of using restic directly
|
||||
* Explain what happens during backup process
|
||||
* Explain the backup process container
|
||||
* cache directory
|
||||
* Not displaying passwords in logs
|
||||
|
||||
Inner workings
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
* Each service in the compose setup is configured with a label
|
||||
to enable backup of volumes or databases
|
||||
* When backup starts a new instance of the container is created
|
||||
mapping in all the needed volumes. It will copy networks etc
|
||||
to ensure databases can be reached
|
||||
* Volumes are mounted to `/volumes/<service_name>/<path>`
|
||||
in the backup process container. `/volumes` is pushed into restic
|
||||
* Databases are backed up from stdin / dumps into restic using path
|
||||
`/databases/<service_name>/dump.sql`
|
||||
* Cron triggers backup at 2AM every day
|
|
@ -0,0 +1,447 @@
|
|||
Configuration
|
||||
=============
|
||||
|
||||
Environment Variables
|
||||
---------------------
|
||||
|
||||
RESTIC_REPOSITORY
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
Sets the restic repository path.
|
||||
|
||||
This is a standard environment variable
|
||||
the ``restic`` command will read making it simple for
|
||||
us to enter the container and use the restic command directly.
|
||||
|
||||
More about this value and supported backends:
|
||||
https://restic.readthedocs.io/en/stable/030_preparing_a_new_repo.html
|
||||
|
||||
RESTIC_PASSWORD
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
Sets the password is used to encrypt/decrypt data.
|
||||
Losing this password will make recovery impossible.
|
||||
|
||||
This is a standard environment variable the ``restic``
|
||||
command will read making it simple for us to enter the
|
||||
container running the command directly.
|
||||
|
||||
RESTIC_KEEP_DAILY
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
**Default value**: ``7``
|
||||
|
||||
How many daily snapshots (grouped by path) back in time we
|
||||
want to keep. This is passed to restic in the
|
||||
``forget --keep-daily`` option.
|
||||
|
||||
RESTIC_KEEP_WEEKLY
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
**Default value**: ``4``
|
||||
|
||||
How many weeks back we should keep at least one snapshot
|
||||
(grouped by path). This is passed to restic in the
|
||||
``forget --keep-weekly`` option.
|
||||
|
||||
RESTIC_KEEP_MONTHLY
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
**Default value**: ``12``
|
||||
|
||||
How many months back we should keep at least on snapshot
|
||||
(grouped by path). This is passed to restic in the
|
||||
``forget --keep-monthly`` option.
|
||||
|
||||
The schedule parameters only accepts numeric values
|
||||
and is validated when the container starts. Providing
|
||||
values cron does not understand will stall all backup.
|
||||
|
||||
RESTIC_KEEP_YEARLY
|
||||
~~~~~~~~~~~~~~~~~~
|
||||
|
||||
**Default value**: ``3``
|
||||
|
||||
How many years back we should keep at least one snapshot
|
||||
(grouped by path). This is passed to restic in the
|
||||
``forget --keep-yearly`` option.
|
||||
|
||||
CRON_SCHEDULE
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
**Default value**: ``0 2 * * *`` (daily at 02:00)
|
||||
|
||||
The cron schedule parameters. The crontab is generated when the
|
||||
container starts from the ``CRON_SCHEDULE`` and ``CRON_COMMAND``
|
||||
env variables.
|
||||
|
||||
.. code::
|
||||
|
||||
┌───────────── minute (0 - 59)
|
||||
│ ┌───────────── hour (0 - 23)
|
||||
│ │ ┌───────────── day of the month (1 - 31)
|
||||
│ │ │ ┌───────────── month (1 - 12)
|
||||
│ │ │ │ ┌───────────── day of the week (0 - 6) (Sunday to Saturday)
|
||||
│ │ │ │ │
|
||||
│ │ │ │ │
|
||||
│ │ │ │ │
|
||||
* * * * * command to execute
|
||||
|
||||
CRON_COMMAND
|
||||
~~~~~~~~~~~~
|
||||
|
||||
**Default value**: ``source /env.sh && rcb backup > /proc/1/fd/1``
|
||||
|
||||
The command executed in the crontab. A single line is generated when
|
||||
the container starts from the ``CRON_SCHEDULE`` and ``CRON_COMMAND``
|
||||
environment variables.
|
||||
|
||||
The default command sources a dump of all env vars, runs the
|
||||
backup command and directs output to pid 1 so it appears in
|
||||
docker logs.
|
||||
|
||||
By default the crontab will look like this::
|
||||
|
||||
0 2 * * * source /env.sh && rcb backup > /proc/1/fd/1
|
||||
|
||||
LOG_LEVEL
|
||||
~~~~~~~~~
|
||||
|
||||
**Default value**: ``info``
|
||||
|
||||
Log level for the ``rcb`` command. Valid values are
|
||||
``debug``, ``info``, ``warning``, ``error``.
|
||||
|
||||
EMAIL_HOST
|
||||
~~~~~~~~~~
|
||||
|
||||
The email host to use.
|
||||
|
||||
Alerts can be tested using the ``rcb alerts`` command.
|
||||
This will send a test message to all configured alert
|
||||
backends.
|
||||
|
||||
EMAIL_PORT
|
||||
~~~~~~~~~~
|
||||
|
||||
The port to connect to
|
||||
|
||||
Alerts can be tested using the ``rcb alerts`` command.
|
||||
This will send a test message to all configured alert
|
||||
backends.
|
||||
|
||||
EMAIL_HOST_USER
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
The user of the sender account
|
||||
|
||||
Alerts can be tested using the ``rcb alerts`` command.
|
||||
This will send a test message to all configured alert
|
||||
backends.
|
||||
|
||||
EMAIL_HOST_PASSWORD
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The password for the sender account
|
||||
|
||||
Alerts can be tested using the ``rcb alerts`` command.
|
||||
This will send a test message to all configured alert
|
||||
backends.
|
||||
|
||||
EMAIL_SEND_TO
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
The email address to send alerts
|
||||
|
||||
Alerts can be tested using the ``rcb alerts`` command.
|
||||
This will send a test message to all configured alert
|
||||
backends.
|
||||
|
||||
DISCORD_WEBHOOK
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
The discord webhook url. And administrator can quickly set this up
|
||||
by going to server settings in the discord client and create
|
||||
a webhook that will post embedded messages to a specific channel.
|
||||
|
||||
The url usually looks like this: ``https://discordapp.com/api/webhooks/...```
|
||||
|
||||
DOCKER_HOST
|
||||
~~~~~~~~~~~
|
||||
|
||||
**Default value**: ``unix://tmp/docker.sock``
|
||||
|
||||
The socket or host of the docker service.
|
||||
|
||||
DOCKER_TLS_VERIFY
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
If defined verify the host against a CA certificate.
|
||||
Path to certs is defined in ``DOCKER_CERT_PATH``
|
||||
and can be copied or mapped into this backup container.
|
||||
|
||||
DOCKER_CERT_PATH
|
||||
~~~~~~~~~~~~~~~~
|
||||
|
||||
A path to a directory containing TLS certificates to use when
|
||||
connecting to the Docker host. Combined with ``DOCKER_TLS_VERIFY``
|
||||
this can be used to talk to docker through TLS in cases
|
||||
were we cannot map in the docker socket.
|
||||
|
||||
INCLUDE_PROJECT_NAME
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Define this environment variable if your backup destination
|
||||
paths needs project name as a prefix. This is useful
|
||||
when running multiple projects.
|
||||
|
||||
EXCLUDE_BIND_MOUNTS
|
||||
~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Docker has to volumes types. Binds and volumes.
|
||||
Volumes are docker volumes (``docker`volume list``).
|
||||
Binds are paths mapped into the container from
|
||||
the host for example in the ``volumes`` section
|
||||
of a service.
|
||||
|
||||
If defined all host binds will be ignored globally.
|
||||
This is useful when you only care about actual
|
||||
docker volumes. Often host binds are only used
|
||||
for mapping in configuration. This saves the user
|
||||
from manually excluding these bind volumes.
|
||||
|
||||
SWARM_MODE
|
||||
~~~~~~~~~~
|
||||
|
||||
If defined containers in swarm stacks are also evaluated.
|
||||
|
||||
Compose Labels
|
||||
--------------
|
||||
|
||||
What is backed up is controlled by simple labels in the compose
|
||||
yaml file. At any point we can verify this configuration
|
||||
by running the ``rcb status`` command.
|
||||
|
||||
.. code:
|
||||
|
||||
$ docker-compose run --rm backup rcb status
|
||||
INFO: Status for compose project 'myproject'
|
||||
INFO: Repository: '<restic repository>'
|
||||
INFO: Backup currently running?: False
|
||||
INFO: --------------- Detected Config ---------------
|
||||
INFO: service: mysql
|
||||
INFO: - mysql (is_ready=True)
|
||||
INFO: service: mariadb
|
||||
INFO: - mariadb (is_ready=True)
|
||||
INFO: service: postgres
|
||||
INFO: - postgres (is_ready=True)
|
||||
INFO: service: web
|
||||
INFO: - volume: media
|
||||
INFO: - volume: /srv/files
|
||||
|
||||
Here we can see what volumes and databases are detected for backup.
|
||||
|
||||
Volumes
|
||||
~~~~~~~
|
||||
|
||||
To enable volume backup for a service we simply add the
|
||||
`restic-compose-backup.volumes: true` label. The value
|
||||
must be ``true``.
|
||||
|
||||
Example:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
myservice:
|
||||
image: some_image
|
||||
labels:
|
||||
restic-compose-backup.volumes: true
|
||||
volumes:
|
||||
- uploaded_media:/srv/media
|
||||
- uploaded_files:/srv/files
|
||||
- /srv/data:/srv/data
|
||||
|
||||
volumes:
|
||||
media:
|
||||
files:
|
||||
|
||||
This will back up the three volumes mounted to this service.
|
||||
Their path in restic will be:
|
||||
|
||||
- /volumes/myservice/srv/media
|
||||
- /volumes/myservice/srv/files
|
||||
- /volumes/myservice/srv/data
|
||||
|
||||
A simple `include` and `exclude` filter for what volumes
|
||||
should be backed up is also available. Note that this
|
||||
includes or excludes entire volumes and are not include/exclude
|
||||
patterns for files in the volumes.
|
||||
|
||||
.. note:: The ``exclude`` and ``include`` filtering is applied on
|
||||
the source path, not the destination.
|
||||
|
||||
Include example including two volumes only:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
myservice:
|
||||
image: some_image
|
||||
labels:
|
||||
restic-compose-backup.volumes: true
|
||||
restic-compose-backup.volumes.include: "uploaded_media,uploaded_files"
|
||||
volumes:
|
||||
- uploaded_media:/srv/media
|
||||
- uploaded_files:/srv/files
|
||||
- /srv/data:/srv/data
|
||||
|
||||
volumes:
|
||||
media:
|
||||
files:
|
||||
|
||||
Exclude example achieving the same result as the example above.
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
example:
|
||||
image: some_image
|
||||
labels:
|
||||
restic-compose-backup.volumes: true
|
||||
restic-compose-backup.volumes.exclude: "data"
|
||||
volumes:
|
||||
# Excluded by filter
|
||||
- media:/srv/media
|
||||
# Backed up
|
||||
- files:/srv/files
|
||||
- /srv/data:/srv/data
|
||||
|
||||
volumes:
|
||||
media:
|
||||
files:
|
||||
|
||||
The ``exclude`` and ``include`` tag can be used together
|
||||
in more complex situations.
|
||||
|
||||
mariadb
|
||||
~~~~~~~
|
||||
|
||||
To enable backup of mariadb simply add the
|
||||
``restic-compose-backup.mariadb: true`` label.
|
||||
|
||||
Credentials are fetched from the following environment
|
||||
variables in the mariadb service. This is the standard
|
||||
when using the official mariadb_ image.
|
||||
|
||||
.. code::
|
||||
|
||||
MYSQL_USER
|
||||
MYSQL_PASSWORD
|
||||
|
||||
Backups are done by dumping all databases directly into
|
||||
restic through stdin using ``mysqldump``. It will appear
|
||||
in restic as a separate snapshot with path
|
||||
``/databases/<service_name>/all_databases.sql``.
|
||||
|
||||
.. warning: This will only back up the databases the
|
||||
``MYSQL_USER` has access to. If you have multiple
|
||||
databases this must be taken into consideration.
|
||||
|
||||
Example:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
mariadb:
|
||||
image: mariadb:10
|
||||
labels:
|
||||
restic-compose-backup.mariadb: true
|
||||
env_file:
|
||||
mariadb-credentials.env
|
||||
volumes:
|
||||
- mariadb:/var/lib/mysql
|
||||
|
||||
volumes:
|
||||
mariadb:
|
||||
|
||||
mysql
|
||||
~~~~~
|
||||
|
||||
To enable backup of mysql simply add the
|
||||
``restic-compose-backup.mysql: true`` label.
|
||||
|
||||
Credentials are fetched from the following environment
|
||||
variables in the mysql service. This is the standard
|
||||
when using the official mysql_ image.
|
||||
|
||||
.. code::
|
||||
|
||||
MYSQL_USER
|
||||
MYSQL_PASSWORD
|
||||
|
||||
Backups are done by dumping all databases directly into
|
||||
restic through stdin using ``mysqldump``. It will appear
|
||||
in restic as a separate snapshot with path
|
||||
``/databases/<service_name>/all_databases.sql``.
|
||||
|
||||
.. warning: This will only back up the databases the
|
||||
``MYSQL_USER` has access to. If you have multiple
|
||||
databases this must be taken into consideration.
|
||||
|
||||
Example:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
mysql:
|
||||
image: mysql:5
|
||||
labels:
|
||||
restic-compose-backup.mysql: true
|
||||
env_file:
|
||||
mysql-credentials.env
|
||||
volumes:
|
||||
- mysql:/var/lib/mysql
|
||||
|
||||
volumes:
|
||||
mysql:
|
||||
|
||||
postgres
|
||||
~~~~~~~~
|
||||
|
||||
To enable backup of mysql simply add the
|
||||
``restic-compose-backup.postgres: true`` label.
|
||||
|
||||
Credentials are fetched from the following environment
|
||||
variables in the postgres service. This is the standard
|
||||
when using the official postgres_ image.
|
||||
|
||||
.. code::
|
||||
|
||||
POSTGRES_USER
|
||||
POSTGRES_PASSWORD
|
||||
POSTGRES_DB
|
||||
|
||||
Backups are done by dumping the ``POSTGRES_DB`` directly into
|
||||
restic through stdin using ``pg_dump``. It will appear
|
||||
in restic as a separate snapshot with path
|
||||
``/databases/<service_name>/<POSTGRES_DB>.sql``.
|
||||
|
||||
.. warning:: Currently only the ``POSTGRES_DB`` database
|
||||
is dumped.
|
||||
|
||||
Example:
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
postgres:
|
||||
image: postgres:11
|
||||
labels:
|
||||
# Enables backup of this database
|
||||
restic-compose-backup.postgres: true
|
||||
env_file:
|
||||
postgres-credentials.env
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
|
||||
volumes:
|
||||
pgdata:
|
||||
|
||||
.. _mariadb: https://hub.docker.com/_/mariadb
|
||||
.. _mysql: https://hub.docker.com/_/mysql
|
||||
.. _postgres: https://hub.docker.com/_/postgres
|
|
@ -0,0 +1,53 @@
|
|||
Introduction
|
||||
============
|
||||
|
||||
|
||||
Install
|
||||
-------
|
||||
|
||||
restic-compose-backup is available at docker `docker hub`_.
|
||||
|
||||
.. code::
|
||||
|
||||
docker pull restic-compose-backup
|
||||
|
||||
Optionally it can be built from source using the github_ repository.
|
||||
|
||||
.. code:: bash
|
||||
|
||||
git clone https://github.com/ZettaIO/restic-compose-backup.git
|
||||
cd restic-compose-backup
|
||||
# Build and tag the image locally
|
||||
docker build src/ --tag restic-compose-backup
|
||||
|
||||
Bug reports and issues
|
||||
----------------------
|
||||
|
||||
Please report bugs an issues on github_
|
||||
|
||||
Development setup
|
||||
-----------------
|
||||
|
||||
Getting started with local development is fairly simple.
|
||||
The github_ repository contains a simple ``docker-compose.yaml``
|
||||
|
||||
.. code:: bash
|
||||
|
||||
docker-compose up -d
|
||||
# Enter the container in sh
|
||||
docker-compose run --rm backup sh
|
||||
|
||||
The dev compose setup maps in the source from the host
|
||||
and the spawned backup container will inherit all
|
||||
the volumes from the backup service ensuring code changes
|
||||
propagates during development.
|
||||
|
||||
Set up a local venv and install the package in development mode::
|
||||
|
||||
python -m venv .venv
|
||||
. .venv/bin/activate
|
||||
pip install -e ./src
|
||||
|
||||
|
||||
.. _docker hub: https://hub.docker.com/r/zettaio/restic-compose-backup
|
||||
.. _github: https://github.com/ZettaIO/restic-compose-backup
|
|
@ -0,0 +1,199 @@
|
|||
|
||||
The `rcb` command
|
||||
-----------------
|
||||
|
||||
The ``rcb`` command is is basically what this entire project is.
|
||||
It provides useful commands interacting with the compose setup
|
||||
and restic.
|
||||
|
||||
The command can be executed inside the container or through ``run``.
|
||||
|
||||
.. code:: bash
|
||||
|
||||
# Get the current status using run
|
||||
$ docker-compose run --rm backup rcb status
|
||||
|
||||
# by entering the container
|
||||
$ docker-compose exec backup sh
|
||||
/restic-compose-backup # rcb status
|
||||
|
||||
Log level can be overridden by using the ``--log-level``
|
||||
flag. This can help you better understand what is going on
|
||||
for example by using ``--log-level debug``.
|
||||
|
||||
version
|
||||
~~~~~~~
|
||||
|
||||
Displays the version.
|
||||
|
||||
Example output::
|
||||
|
||||
/restic-compose-backup # rcb version
|
||||
0.4.0
|
||||
|
||||
status
|
||||
~~~~~~
|
||||
|
||||
Shows the general status of our setup. The command is doing
|
||||
the following operations
|
||||
|
||||
- Displays the name of the compose setup
|
||||
- Displays the repository path
|
||||
- Tells us if a backup is currently running
|
||||
- Removes stale backup process containers if the exist
|
||||
- Checks is the repository is initialized
|
||||
- Initializes the repository if this is not already done
|
||||
- Displays what volumes and databases are flagged for backup
|
||||
|
||||
Example output::
|
||||
|
||||
INFO: Status for compose project 'myproject'
|
||||
INFO: Repository: '<restic repository>'
|
||||
INFO: Backup currently running?: False
|
||||
INFO: --------------- Detected Config ---------------
|
||||
INFO: service: mysql
|
||||
INFO: - mysql (is_ready=True)
|
||||
INFO: service: mariadb
|
||||
INFO: - mariadb (is_ready=True)
|
||||
INFO: service: postgres
|
||||
INFO: - postgres (is_ready=True)
|
||||
INFO: service: web
|
||||
INFO: - volume: media
|
||||
INFO: - volume: /srv/files
|
||||
|
||||
alert
|
||||
~~~~~
|
||||
|
||||
Sends a test message to all configured alert backends
|
||||
and is there for you to verify that alerts are in
|
||||
fact working and configured correctly.
|
||||
|
||||
The format of this message::
|
||||
|
||||
subject: myproject: Test Alert
|
||||
body: Test message
|
||||
|
||||
snapshots
|
||||
~~~~~~~~~
|
||||
|
||||
Displays the latest snapshots in restic. This can also
|
||||
be done with ``restic snapshots``.
|
||||
|
||||
Example output::
|
||||
|
||||
/restic-compose-backup # rcb snapshots
|
||||
repository f325264e opened successfully, password is correct
|
||||
ID Time Host Tags Paths
|
||||
---------------------------------------------------------------------------------------------
|
||||
19928e1c 2019-12-09 02:07:44 b3038db04ec1 /volumes
|
||||
7a642f37 2019-12-09 02:07:45 b3038db04ec1 /databases/mysql/all_databases.sql
|
||||
883dada4 2019-12-09 02:07:46 b3038db04ec1 /databases/mariadb/all_databases.sql
|
||||
76ef2457 2019-12-09 02:07:47 b3038db04ec1 /databases/postgres/test.sql
|
||||
---------------------------------------------------------------------------------------------
|
||||
4 snapshots
|
||||
|
||||
backup
|
||||
~~~~~~
|
||||
|
||||
Starts a backup process by spawning a new docker container.
|
||||
The network stack, mounted volumes, env vars etc. from the
|
||||
backup service are copied to this container.
|
||||
|
||||
We attach to this container and stream the logs and delete
|
||||
the container with the backup process is completed. If the
|
||||
container for any reason should not be deleted, it will
|
||||
be in next backup run as these containers are tagged with
|
||||
a unique label and detected.
|
||||
|
||||
If anything goes wrong the exist status of the container
|
||||
is non-zero and the logs from this backup run will be sent
|
||||
to the user through the configure alerts.
|
||||
|
||||
This command is by default called by cron every
|
||||
day at 02:00 unless configured otherwise. We can also run this
|
||||
manually is needed.
|
||||
|
||||
Running this command will do the following:
|
||||
|
||||
* Checks if a backup process is already running.
|
||||
If so, we alert the user and abort
|
||||
* Gathers all the volumes configured for backup and starts
|
||||
the backup process with these volumes mounted into ``/volumes``
|
||||
* Checks the status of the process and reports to the user
|
||||
if anything failed
|
||||
|
||||
The backup process does the following:
|
||||
|
||||
* ``status`` is first called to ensure everything is ok
|
||||
* Backs up ``/volumes`` if any volumes were mounted
|
||||
* Backs up each configured database
|
||||
* Runs ``cleanup`` purging snapshots based on the configured policy
|
||||
* Checks the health of the repository
|
||||
|
||||
Example::
|
||||
|
||||
$ docker-compose exec backup sh
|
||||
/restic-compose-backup # rcb backup
|
||||
INFO: Starting backup container
|
||||
INFO: Backup process container: loving_jepsen
|
||||
INFO: 2019-12-09 04:50:22,817 - INFO: Status for compose project 'restic-compose-backup'
|
||||
INFO: 2019-12-09 04:50:22,817 - INFO: Repository: '/restic_data'
|
||||
INFO: 2019-12-09 04:50:22,817 - INFO: Backup currently running?: True
|
||||
INFO: 2019-12-09 04:50:23,701 - INFO: ------------------------- Detected Config -------------------------
|
||||
INFO: 2019-12-09 04:50:23,701 - INFO: service: mysql
|
||||
INFO: 2019-12-09 04:50:23,718 - INFO: - mysql (is_ready=True)
|
||||
INFO: 2019-12-09 04:50:23,718 - INFO: service: mariadb
|
||||
INFO: 2019-12-09 04:50:23,726 - INFO: - mariadb (is_ready=True)
|
||||
INFO: 2019-12-09 04:50:23,727 - INFO: service: postgres
|
||||
INFO: 2019-12-09 04:50:23,734 - INFO: - postgres (is_ready=True)
|
||||
INFO: 2019-12-09 04:50:23,735 - INFO: service: web
|
||||
INFO: 2019-12-09 04:50:23,736 - INFO: - volume: /some/volume
|
||||
INFO: 2019-12-09 04:50:23,736 - INFO: -------------------------------------------------------------------
|
||||
INFO: 2019-12-09 04:50:23,736 - INFO: Backing up volumes
|
||||
INFO: 2019-12-09 04:50:24,661 - INFO: Backing up databases
|
||||
INFO: 2019-12-09 04:50:24,661 - INFO: Backing up mysql in service mysql
|
||||
INFO: 2019-12-09 04:50:25,643 - INFO: Backing up mariadb in service mariadb
|
||||
INFO: 2019-12-09 04:50:26,580 - INFO: Backing up postgres in service postgres
|
||||
INFO: 2019-12-09 04:50:27,555 - INFO: Forget outdated snapshots
|
||||
INFO: 2019-12-09 04:50:28,457 - INFO: Prune stale data freeing storage space
|
||||
INFO: 2019-12-09 04:50:31,547 - INFO: Checking the repository for errors
|
||||
INFO: 2019-12-09 04:50:32,869 - INFO: Backup completed
|
||||
INFO: Backup container exit code: 0
|
||||
|
||||
crontab
|
||||
~~~~~~~
|
||||
|
||||
Generates and verifies the crontab. This is done automatically when
|
||||
the container starts. It can be user to verify the configuration.
|
||||
|
||||
Example output::
|
||||
|
||||
/restic-compose-backup # rcb crontab
|
||||
10 2 * * * source /env.sh && rcb backup > /proc/1/fd/1
|
||||
|
||||
cleanup
|
||||
~~~~~~~
|
||||
|
||||
Purges all snapshots based on the configured policy. (``RESTIC_KEEP_*``
|
||||
env variables). It runs ``restic forget`` and ``restic purge``.
|
||||
|
||||
Example output::
|
||||
|
||||
/restic-compose-backup # rcb cleanup
|
||||
2019-12-09 05:09:52,892 - INFO: Forget outdated snapshots
|
||||
2019-12-09 05:09:53,776 - INFO: Prune stale data freeing storage space
|
||||
|
||||
start-backup-process
|
||||
~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
This can only be executed by the backup process container.
|
||||
Attempting to run this command in the backup service
|
||||
will simply tell you it's not possible.
|
||||
|
||||
The backup process is doing the following:
|
||||
|
||||
* ``status`` is first called to ensure everything is ok
|
||||
* Backs up ``/volumes`` if any volumes were mounted
|
||||
* Backs up each configured database
|
||||
* Runs ``cleanup`` purging snapshots based on the configured policy
|
||||
* Checks the health of the repository
|
|
@ -6,12 +6,16 @@
|
|||
Welcome to restic-compose-backup's documentation!
|
||||
=================================================
|
||||
|
||||
Simple backup with restic for small to medium docker-compose setups.
|
||||
Simple backup with restic_ for docker-compose setups.
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:maxdepth: 3
|
||||
:caption: Contents:
|
||||
|
||||
guide/install
|
||||
guide/configuration
|
||||
guide/rcb
|
||||
guide/advanced
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
@ -19,3 +23,5 @@ Indices and tables
|
|||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
||||
.. _restic: https://restic.net/
|
||||
|
|
|
@ -0,0 +1,196 @@
|
|||
{
|
||||
"Id": "efa5196b4959648e3efcf5ae9f24bc4032849c2665805a5b405216f343b4decd",
|
||||
"Created": "2020-03-05T21:07:34.88927951Z",
|
||||
"Path": "docker-entrypoint.sh",
|
||||
"Args": ["mysqld"],
|
||||
"State": {
|
||||
"Status": "running",
|
||||
"Running": true,
|
||||
"Paused": false,
|
||||
"Restarting": false,
|
||||
"OOMKilled": false,
|
||||
"Dead": false,
|
||||
"Pid": 4887,
|
||||
"ExitCode": 0,
|
||||
"Error": "",
|
||||
"StartedAt": "2020-03-06T01:31:39.728842925Z",
|
||||
"FinishedAt": "2020-03-06T01:31:33.847583199Z"
|
||||
},
|
||||
"Image": "sha256:1fd0e719c4952e22a99e30662fdd7daad53e7e53fbe135d543cc6b82be213951",
|
||||
"ResolvConfPath": "/var/lib/docker/containers/efa5196b4959648e3efcf5ae9f24bc4032849c2665805a5b405216f343b4decd/resolv.conf",
|
||||
"HostnamePath": "/var/lib/docker/containers/efa5196b4959648e3efcf5ae9f24bc4032849c2665805a5b405216f343b4decd/hostname",
|
||||
"HostsPath": "/var/lib/docker/containers/efa5196b4959648e3efcf5ae9f24bc4032849c2665805a5b405216f343b4decd/hosts",
|
||||
"LogPath": "/var/lib/docker/containers/efa5196b4959648e3efcf5ae9f24bc4032849c2665805a5b405216f343b4decd/efa5196b4959648e3efcf5ae9f24bc4032849c2665805a5b405216f343b4decd-json.log",
|
||||
"Name": "/restic-compose-backup_mariadb_1",
|
||||
"RestartCount": 0,
|
||||
"Driver": "overlay2",
|
||||
"Platform": "linux",
|
||||
"MountLabel": "",
|
||||
"ProcessLabel": "",
|
||||
"AppArmorProfile": "",
|
||||
"ExecIDs": null,
|
||||
"HostConfig": {
|
||||
"Binds": ["restic-compose-backup_mariadbdata:/var/lib/mysql:rw"],
|
||||
"ContainerIDFile": "",
|
||||
"LogConfig": {
|
||||
"Type": "json-file",
|
||||
"Config": {}
|
||||
},
|
||||
"NetworkMode": "restic-compose-backup_default",
|
||||
"PortBindings": {},
|
||||
"RestartPolicy": {
|
||||
"Name": "",
|
||||
"MaximumRetryCount": 0
|
||||
},
|
||||
"AutoRemove": false,
|
||||
"VolumeDriver": "",
|
||||
"VolumesFrom": [],
|
||||
"CapAdd": null,
|
||||
"CapDrop": null,
|
||||
"Capabilities": null,
|
||||
"Dns": null,
|
||||
"DnsOptions": null,
|
||||
"DnsSearch": null,
|
||||
"ExtraHosts": null,
|
||||
"GroupAdd": null,
|
||||
"IpcMode": "shareable",
|
||||
"Cgroup": "",
|
||||
"Links": null,
|
||||
"OomScoreAdj": 0,
|
||||
"PidMode": "",
|
||||
"Privileged": false,
|
||||
"PublishAllPorts": false,
|
||||
"ReadonlyRootfs": false,
|
||||
"SecurityOpt": null,
|
||||
"UTSMode": "",
|
||||
"UsernsMode": "",
|
||||
"ShmSize": 67108864,
|
||||
"Runtime": "runc",
|
||||
"ConsoleSize": [0, 0],
|
||||
"Isolation": "",
|
||||
"CpuShares": 0,
|
||||
"Memory": 0,
|
||||
"NanoCpus": 0,
|
||||
"CgroupParent": "",
|
||||
"BlkioWeight": 0,
|
||||
"BlkioWeightDevice": null,
|
||||
"BlkioDeviceReadBps": null,
|
||||
"BlkioDeviceWriteBps": null,
|
||||
"BlkioDeviceReadIOps": null,
|
||||
"BlkioDeviceWriteIOps": null,
|
||||
"CpuPeriod": 0,
|
||||
"CpuQuota": 0,
|
||||
"CpuRealtimePeriod": 0,
|
||||
"CpuRealtimeRuntime": 0,
|
||||
"CpusetCpus": "",
|
||||
"CpusetMems": "",
|
||||
"Devices": null,
|
||||
"DeviceCgroupRules": null,
|
||||
"DeviceRequests": null,
|
||||
"KernelMemory": 0,
|
||||
"KernelMemoryTCP": 0,
|
||||
"MemoryReservation": 0,
|
||||
"MemorySwap": 0,
|
||||
"MemorySwappiness": null,
|
||||
"OomKillDisable": false,
|
||||
"PidsLimit": null,
|
||||
"Ulimits": null,
|
||||
"CpuCount": 0,
|
||||
"CpuPercent": 0,
|
||||
"IOMaximumIOps": 0,
|
||||
"IOMaximumBandwidth": 0,
|
||||
"MaskedPaths": ["/proc/asound", "/proc/acpi", "/proc/kcore", "/proc/keys", "/proc/latency_stats", "/proc/timer_list", "/proc/timer_stats", "/proc/sched_debug", "/proc/scsi", "/sys/firmware"],
|
||||
"ReadonlyPaths": ["/proc/bus", "/proc/fs", "/proc/irq", "/proc/sys", "/proc/sysrq-trigger"]
|
||||
},
|
||||
"GraphDriver": {
|
||||
"Data": {
|
||||
"LowerDir": "/var/lib/docker/overlay2/96e51e6162c0cb4385248375192ec777dd42b3ae7973e402de351f3932c502d0-init/diff:/var/lib/docker/overlay2/38780a41f93b7a20de03f1d76febb885f9213906fb30bad17cb3ad231fb7ce43/diff:/var/lib/docker/overlay2/a2abce521690b1baf6aa61e109a4659cb4272936871bc1afa73271eb8e453449/diff:/var/lib/docker/overlay2/a696286588d1d33b994b7f6e31c176c5f7e67c4f757d730323a7b6591d55f786/diff:/var/lib/docker/overlay2/c4bd8133c0d9547945d38a9998439082ce7b53df7e64737add5a5c824e6f67f2/diff:/var/lib/docker/overlay2/110e275ef21b8c9cc2cd0cce312fed5aabceb056460f637b958dfee56b7b3be8/diff:/var/lib/docker/overlay2/831c8a624e424f298766028e76a8ac08df0c5cf4564f63cae61330a8bce0cf63/diff:/var/lib/docker/overlay2/7ad8ae774951ec40c68b0993ef07ef3d70aa8aed44ea9f1e4d943ca5404cc717/diff:/var/lib/docker/overlay2/19bca9fb61ef1156f8a97313c126a6c06d7fe44a6c49e3affe16f50f2d5e56ff/diff:/var/lib/docker/overlay2/dcd4dda04d06b0a0c7e78517c6209fd67735b3027afda2c85a92de37ff7297d1/diff:/var/lib/docker/overlay2/babf41f5fe1f7b88c17cfce27214a4ad9473b0f8e0f118db948d2acddf4d4798/diff:/var/lib/docker/overlay2/b5f97865010acd5b04b4031d6223cd0b34fab89267891d61256ea16936be52f8/diff:/var/lib/docker/overlay2/6aba0159141ebb6d6783181d154c65046447b7d2bebce65d44c4939ba7943cca/diff:/var/lib/docker/overlay2/c71c34fe0e7e95409a9fc18698f0aee505940fd96aa3718836e2d89f3cfb2d49/diff:/var/lib/docker/overlay2/3be993436e2a6764a6c3c57a2e948f7a57e45ed0ec26cdd3366f4c1106c69869/diff",
|
||||
"MergedDir": "/var/lib/docker/overlay2/96e51e6162c0cb4385248375192ec777dd42b3ae7973e402de351f3932c502d0/merged",
|
||||
"UpperDir": "/var/lib/docker/overlay2/96e51e6162c0cb4385248375192ec777dd42b3ae7973e402de351f3932c502d0/diff",
|
||||
"WorkDir": "/var/lib/docker/overlay2/96e51e6162c0cb4385248375192ec777dd42b3ae7973e402de351f3932c502d0/work"
|
||||
},
|
||||
"Name": "overlay2"
|
||||
},
|
||||
"Mounts": [{
|
||||
"Type": "volume",
|
||||
"Name": "restic-compose-backup_mariadbdata",
|
||||
"Source": "/var/lib/docker/volumes/restic-compose-backup_mariadbdata/_data",
|
||||
"Destination": "/var/lib/mysql",
|
||||
"Driver": "local",
|
||||
"Mode": "rw",
|
||||
"RW": true,
|
||||
"Propagation": ""
|
||||
}],
|
||||
"Config": {
|
||||
"Hostname": "efa5196b4959",
|
||||
"Domainname": "",
|
||||
"User": "",
|
||||
"AttachStdin": false,
|
||||
"AttachStdout": false,
|
||||
"AttachStderr": false,
|
||||
"ExposedPorts": {
|
||||
"3306/tcp": {}
|
||||
},
|
||||
"Tty": false,
|
||||
"OpenStdin": false,
|
||||
"StdinOnce": false,
|
||||
"Env": ["MYSQL_ROOT_PASSWORD=my-secret-pw", "MYSQL_DATABASE=mydb", "MYSQL_USER=myuser", "MYSQL_PASSWORD=mypassword", "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", "GOSU_VERSION=1.10", "GPG_KEYS=177F4010FE56CA3336300305F1656F24C74CD1D8", "MARIADB_MAJOR=10.4", "MARIADB_VERSION=1:10.4.12+maria~bionic"],
|
||||
"Cmd": ["mysqld"],
|
||||
"Image": "mariadb:10",
|
||||
"Volumes": {
|
||||
"/var/lib/mysql": {}
|
||||
},
|
||||
"WorkingDir": "",
|
||||
"Entrypoint": ["docker-entrypoint.sh"],
|
||||
"OnBuild": null,
|
||||
"Labels": {
|
||||
"com.docker.compose.config-hash": "c6ecde85ad111d324a4c97cde3a03898074b026c68ecffc0f7020e5eca9a71d7",
|
||||
"com.docker.compose.container-number": "1",
|
||||
"com.docker.compose.oneoff": "False",
|
||||
"com.docker.compose.project": "restic-compose-backup",
|
||||
"com.docker.compose.project.config_files": "docker-compose.yaml",
|
||||
"com.docker.compose.project.working_dir": "C:\\Users\\efors\\projects\\zetta.io\\projects\\restic-compose-backup",
|
||||
"com.docker.compose.service": "mariadb",
|
||||
"com.docker.compose.version": "1.25.4",
|
||||
"restic-compose-backup.mariadb": "True"
|
||||
}
|
||||
},
|
||||
"NetworkSettings": {
|
||||
"Bridge": "",
|
||||
"SandboxID": "d462bb5dfdd26aba12b8a395ac90262ab00d65408bf60dfa1ade0ab6a1851c70",
|
||||
"HairpinMode": false,
|
||||
"LinkLocalIPv6Address": "",
|
||||
"LinkLocalIPv6PrefixLen": 0,
|
||||
"Ports": {
|
||||
"3306/tcp": null
|
||||
},
|
||||
"SandboxKey": "/var/run/docker/netns/d462bb5dfdd2",
|
||||
"SecondaryIPAddresses": null,
|
||||
"SecondaryIPv6Addresses": null,
|
||||
"EndpointID": "",
|
||||
"Gateway": "",
|
||||
"GlobalIPv6Address": "",
|
||||
"GlobalIPv6PrefixLen": 0,
|
||||
"IPAddress": "",
|
||||
"IPPrefixLen": 0,
|
||||
"IPv6Gateway": "",
|
||||
"MacAddress": "",
|
||||
"Networks": {
|
||||
"restic-compose-backup_default": {
|
||||
"IPAMConfig": null,
|
||||
"Links": null,
|
||||
"Aliases": ["efa5196b4959", "mariadb"],
|
||||
"NetworkID": "8f3349b0debec88f9f48fff02d84cda3feae0a0e8c516e8b42e5777bb03db1cb",
|
||||
"EndpointID": "0b75d3f00aa077fe95156bc80463d33fb21d241a287b33c06769047855c38400",
|
||||
"Gateway": "172.19.0.1",
|
||||
"IPAddress": "172.19.0.3",
|
||||
"IPPrefixLen": 16,
|
||||
"IPv6Gateway": "",
|
||||
"GlobalIPv6Address": "",
|
||||
"GlobalIPv6PrefixLen": 0,
|
||||
"MacAddress": "02:42:ac:13:00:03",
|
||||
"DriverOpts": null
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,207 @@
|
|||
{
|
||||
"Id": "56c57903b6da3afd331312b244ddd0324f5b21cbbe5fc30072edf24781d80f76",
|
||||
"Created": "2020-03-06T22:36:17.266061631Z",
|
||||
"Path": "docker-entrypoint.sh",
|
||||
"Args": ["mysqld"],
|
||||
"State": {
|
||||
"Status": "running",
|
||||
"Running": true,
|
||||
"Paused": false,
|
||||
"Restarting": false,
|
||||
"OOMKilled": false,
|
||||
"Dead": false,
|
||||
"Pid": 35967,
|
||||
"ExitCode": 0,
|
||||
"Error": "",
|
||||
"StartedAt": "2020-03-06T22:36:17.636265528Z",
|
||||
"FinishedAt": "0001-01-01T00:00:00Z"
|
||||
},
|
||||
"Image": "sha256:1fd0e719c4952e22a99e30662fdd7daad53e7e53fbe135d543cc6b82be213951",
|
||||
"ResolvConfPath": "/var/lib/docker/containers/56c57903b6da3afd331312b244ddd0324f5b21cbbe5fc30072edf24781d80f76/resolv.conf",
|
||||
"HostnamePath": "/var/lib/docker/containers/56c57903b6da3afd331312b244ddd0324f5b21cbbe5fc30072edf24781d80f76/hostname",
|
||||
"HostsPath": "/var/lib/docker/containers/56c57903b6da3afd331312b244ddd0324f5b21cbbe5fc30072edf24781d80f76/hosts",
|
||||
"LogPath": "/var/lib/docker/containers/56c57903b6da3afd331312b244ddd0324f5b21cbbe5fc30072edf24781d80f76/56c57903b6da3afd331312b244ddd0324f5b21cbbe5fc30072edf24781d80f76-json.log",
|
||||
"Name": "/test_mariadb.1.q4uji32qvw4tkuvwx3pbnbgqq",
|
||||
"RestartCount": 0,
|
||||
"Driver": "overlay2",
|
||||
"Platform": "linux",
|
||||
"MountLabel": "",
|
||||
"ProcessLabel": "",
|
||||
"AppArmorProfile": "",
|
||||
"ExecIDs": null,
|
||||
"HostConfig": {
|
||||
"Binds": null,
|
||||
"ContainerIDFile": "",
|
||||
"LogConfig": {
|
||||
"Type": "json-file",
|
||||
"Config": {}
|
||||
},
|
||||
"NetworkMode": "default",
|
||||
"PortBindings": {},
|
||||
"RestartPolicy": {
|
||||
"Name": "",
|
||||
"MaximumRetryCount": 0
|
||||
},
|
||||
"AutoRemove": false,
|
||||
"VolumeDriver": "",
|
||||
"VolumesFrom": null,
|
||||
"CapAdd": null,
|
||||
"CapDrop": null,
|
||||
"Capabilities": null,
|
||||
"Dns": null,
|
||||
"DnsOptions": null,
|
||||
"DnsSearch": null,
|
||||
"ExtraHosts": null,
|
||||
"GroupAdd": null,
|
||||
"IpcMode": "private",
|
||||
"Cgroup": "",
|
||||
"Links": null,
|
||||
"OomScoreAdj": 0,
|
||||
"PidMode": "",
|
||||
"Privileged": false,
|
||||
"PublishAllPorts": false,
|
||||
"ReadonlyRootfs": false,
|
||||
"SecurityOpt": null,
|
||||
"UTSMode": "",
|
||||
"UsernsMode": "",
|
||||
"ShmSize": 67108864,
|
||||
"Runtime": "runc",
|
||||
"ConsoleSize": [0, 0],
|
||||
"Isolation": "default",
|
||||
"CpuShares": 0,
|
||||
"Memory": 0,
|
||||
"NanoCpus": 0,
|
||||
"CgroupParent": "",
|
||||
"BlkioWeight": 0,
|
||||
"BlkioWeightDevice": null,
|
||||
"BlkioDeviceReadBps": null,
|
||||
"BlkioDeviceWriteBps": null,
|
||||
"BlkioDeviceReadIOps": null,
|
||||
"BlkioDeviceWriteIOps": null,
|
||||
"CpuPeriod": 0,
|
||||
"CpuQuota": 0,
|
||||
"CpuRealtimePeriod": 0,
|
||||
"CpuRealtimeRuntime": 0,
|
||||
"CpusetCpus": "",
|
||||
"CpusetMems": "",
|
||||
"Devices": null,
|
||||
"DeviceCgroupRules": null,
|
||||
"DeviceRequests": null,
|
||||
"KernelMemory": 0,
|
||||
"KernelMemoryTCP": 0,
|
||||
"MemoryReservation": 0,
|
||||
"MemorySwap": 0,
|
||||
"MemorySwappiness": null,
|
||||
"OomKillDisable": false,
|
||||
"PidsLimit": null,
|
||||
"Ulimits": null,
|
||||
"CpuCount": 0,
|
||||
"CpuPercent": 0,
|
||||
"IOMaximumIOps": 0,
|
||||
"IOMaximumBandwidth": 0,
|
||||
"Mounts": [{
|
||||
"Type": "volume",
|
||||
"Source": "test_mariadbdata",
|
||||
"Target": "/var/lib/mysql",
|
||||
"VolumeOptions": {
|
||||
"Labels": {
|
||||
"com.docker.stack.namespace": "test"
|
||||
}
|
||||
}
|
||||
}],
|
||||
"MaskedPaths": ["/proc/asound", "/proc/acpi", "/proc/kcore", "/proc/keys", "/proc/latency_stats", "/proc/timer_list", "/proc/timer_stats", "/proc/sched_debug", "/proc/scsi", "/sys/firmware"],
|
||||
"ReadonlyPaths": ["/proc/bus", "/proc/fs", "/proc/irq", "/proc/sys", "/proc/sysrq-trigger"]
|
||||
},
|
||||
"GraphDriver": {
|
||||
"Data": {
|
||||
"LowerDir": "/var/lib/docker/overlay2/ba8a39bdb1d2e25d373b6b00c764be3d37353e57cf03981c4c3e5a20ae6a602b-init/diff:/var/lib/docker/overlay2/38780a41f93b7a20de03f1d76febb885f9213906fb30bad17cb3ad231fb7ce43/diff:/var/lib/docker/overlay2/a2abce521690b1baf6aa61e109a4659cb4272936871bc1afa73271eb8e453449/diff:/var/lib/docker/overlay2/a696286588d1d33b994b7f6e31c176c5f7e67c4f757d730323a7b6591d55f786/diff:/var/lib/docker/overlay2/c4bd8133c0d9547945d38a9998439082ce7b53df7e64737add5a5c824e6f67f2/diff:/var/lib/docker/overlay2/110e275ef21b8c9cc2cd0cce312fed5aabceb056460f637b958dfee56b7b3be8/diff:/var/lib/docker/overlay2/831c8a624e424f298766028e76a8ac08df0c5cf4564f63cae61330a8bce0cf63/diff:/var/lib/docker/overlay2/7ad8ae774951ec40c68b0993ef07ef3d70aa8aed44ea9f1e4d943ca5404cc717/diff:/var/lib/docker/overlay2/19bca9fb61ef1156f8a97313c126a6c06d7fe44a6c49e3affe16f50f2d5e56ff/diff:/var/lib/docker/overlay2/dcd4dda04d06b0a0c7e78517c6209fd67735b3027afda2c85a92de37ff7297d1/diff:/var/lib/docker/overlay2/babf41f5fe1f7b88c17cfce27214a4ad9473b0f8e0f118db948d2acddf4d4798/diff:/var/lib/docker/overlay2/b5f97865010acd5b04b4031d6223cd0b34fab89267891d61256ea16936be52f8/diff:/var/lib/docker/overlay2/6aba0159141ebb6d6783181d154c65046447b7d2bebce65d44c4939ba7943cca/diff:/var/lib/docker/overlay2/c71c34fe0e7e95409a9fc18698f0aee505940fd96aa3718836e2d89f3cfb2d49/diff:/var/lib/docker/overlay2/3be993436e2a6764a6c3c57a2e948f7a57e45ed0ec26cdd3366f4c1106c69869/diff",
|
||||
"MergedDir": "/var/lib/docker/overlay2/ba8a39bdb1d2e25d373b6b00c764be3d37353e57cf03981c4c3e5a20ae6a602b/merged",
|
||||
"UpperDir": "/var/lib/docker/overlay2/ba8a39bdb1d2e25d373b6b00c764be3d37353e57cf03981c4c3e5a20ae6a602b/diff",
|
||||
"WorkDir": "/var/lib/docker/overlay2/ba8a39bdb1d2e25d373b6b00c764be3d37353e57cf03981c4c3e5a20ae6a602b/work"
|
||||
},
|
||||
"Name": "overlay2"
|
||||
},
|
||||
"Mounts": [{
|
||||
"Type": "volume",
|
||||
"Name": "test_mariadbdata",
|
||||
"Source": "/var/lib/docker/volumes/test_mariadbdata/_data",
|
||||
"Destination": "/var/lib/mysql",
|
||||
"Driver": "local",
|
||||
"Mode": "z",
|
||||
"RW": true,
|
||||
"Propagation": ""
|
||||
}],
|
||||
"Config": {
|
||||
"Hostname": "56c57903b6da",
|
||||
"Domainname": "",
|
||||
"User": "",
|
||||
"AttachStdin": false,
|
||||
"AttachStdout": false,
|
||||
"AttachStderr": false,
|
||||
"ExposedPorts": {
|
||||
"3306/tcp": {}
|
||||
},
|
||||
"Tty": false,
|
||||
"OpenStdin": false,
|
||||
"StdinOnce": false,
|
||||
"Env": ["MYSQL_DATABASE=mydb", "MYSQL_PASSWORD=mypassword", "MYSQL_ROOT_PASSWORD=my-secret-pw", "MYSQL_USER=myuser", "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", "GOSU_VERSION=1.10", "GPG_KEYS=177F4010FE56CA3336300305F1656F24C74CD1D8", "MARIADB_MAJOR=10.4", "MARIADB_VERSION=1:10.4.12+maria~bionic"],
|
||||
"Cmd": ["mysqld"],
|
||||
"Image": "mariadb:10@sha256:d1ceee944c90ee3b596266de1b0ac25d2f34adbe9c35156b75bcb9a7047c7545",
|
||||
"Volumes": {
|
||||
"/var/lib/mysql": {}
|
||||
},
|
||||
"WorkingDir": "",
|
||||
"Entrypoint": ["docker-entrypoint.sh"],
|
||||
"OnBuild": null,
|
||||
"Labels": {
|
||||
"com.docker.stack.namespace": "test",
|
||||
"com.docker.swarm.node.id": "gj73oe0vgmldlv2pdcj243231",
|
||||
"com.docker.swarm.service.id": "jewh88xvythjkga24wy1thxc2",
|
||||
"com.docker.swarm.service.name": "test_mariadb",
|
||||
"com.docker.swarm.task": "",
|
||||
"com.docker.swarm.task.id": "q4uji32qvw4tkuvwx3pbnbgqq",
|
||||
"com.docker.swarm.task.name": "test_mariadb.1.q4uji32qvw4tkuvwx3pbnbgqq",
|
||||
"restic-compose-backup.mariadb": "true"
|
||||
}
|
||||
},
|
||||
"NetworkSettings": {
|
||||
"Bridge": "",
|
||||
"SandboxID": "5aa81b0859dfd6f6be629eb966ce365f22dc86620359cce3e3d25d5291b539db",
|
||||
"HairpinMode": false,
|
||||
"LinkLocalIPv6Address": "",
|
||||
"LinkLocalIPv6PrefixLen": 0,
|
||||
"Ports": {
|
||||
"3306/tcp": null
|
||||
},
|
||||
"SandboxKey": "/var/run/docker/netns/5aa81b0859df",
|
||||
"SecondaryIPAddresses": null,
|
||||
"SecondaryIPv6Addresses": null,
|
||||
"EndpointID": "",
|
||||
"Gateway": "",
|
||||
"GlobalIPv6Address": "",
|
||||
"GlobalIPv6PrefixLen": 0,
|
||||
"IPAddress": "",
|
||||
"IPPrefixLen": 0,
|
||||
"IPv6Gateway": "",
|
||||
"MacAddress": "",
|
||||
"Networks": {
|
||||
"test_default": {
|
||||
"IPAMConfig": {
|
||||
"IPv4Address": "10.0.1.3"
|
||||
},
|
||||
"Links": null,
|
||||
"Aliases": ["56c57903b6da"],
|
||||
"NetworkID": "8aweh54u31eq3i47vqdr2aonc",
|
||||
"EndpointID": "5369b4c82a479a3e9dfb3547cb7ac3a0fab888e38ad5c1d0ad02b0e9a9523a64",
|
||||
"Gateway": "",
|
||||
"IPAddress": "10.0.1.3",
|
||||
"IPPrefixLen": 24,
|
||||
"IPv6Gateway": "",
|
||||
"GlobalIPv6Address": "",
|
||||
"GlobalIPv6PrefixLen": 0,
|
||||
"MacAddress": "02:42:0a:00:01:03",
|
||||
"DriverOpts": null
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,93 @@
|
|||
{
|
||||
"ID": "k5427pk4t7ss4d7ylacumeavz",
|
||||
"Version": {
|
||||
"Index": 30
|
||||
},
|
||||
"CreatedAt": "2020-03-08T17:25:59.451947759Z",
|
||||
"UpdatedAt": "2020-03-08T17:26:38.552002711Z",
|
||||
"Spec": {
|
||||
"Labels": {},
|
||||
"Role": "manager",
|
||||
"Availability": "active"
|
||||
},
|
||||
"Description": {
|
||||
"Hostname": "docker-desktop",
|
||||
"Platform": {
|
||||
"Architecture": "x86_64",
|
||||
"OS": "linux"
|
||||
},
|
||||
"Resources": {
|
||||
"NanoCPUs": 4000000000,
|
||||
"MemoryBytes": 2085535744
|
||||
},
|
||||
"Engine": {
|
||||
"EngineVersion": "19.03.5",
|
||||
"Plugins": [{
|
||||
"Type": "Log",
|
||||
"Name": "awslogs"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "fluentd"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "gcplogs"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "gelf"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "journald"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "json-file"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "local"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "logentries"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "splunk"
|
||||
}, {
|
||||
"Type": "Log",
|
||||
"Name": "syslog"
|
||||
}, {
|
||||
"Type": "Network",
|
||||
"Name": "bridge"
|
||||
}, {
|
||||
"Type": "Network",
|
||||
"Name": "host"
|
||||
}, {
|
||||
"Type": "Network",
|
||||
"Name": "ipvlan"
|
||||
}, {
|
||||
"Type": "Network",
|
||||
"Name": "macvlan"
|
||||
}, {
|
||||
"Type": "Network",
|
||||
"Name": "null"
|
||||
}, {
|
||||
"Type": "Network",
|
||||
"Name": "overlay"
|
||||
}, {
|
||||
"Type": "Volume",
|
||||
"Name": "local"
|
||||
}]
|
||||
},
|
||||
"TLSInfo": {
|
||||
"TrustRoot": "-----BEGIN CERTIFICATE-----\nMIIBazCCARCgAwIBAgIUfx7TP8c4SHCrwPPxjSFJQcfTP5QwCgYIKoZIzj0EAwIw\nEzERMA8GA1UEAxMIc3dhcm0tY2EwHhcNMjAwMzA4MTcyMTAwWhcNNDAwMzAzMTcy\nMTAwWjATMREwDwYDVQQDEwhzd2FybS1jYTBZMBMGByqGSM49AgEGCCqGSM49AwEH\nA0IABGOa/9Rdd6qNc24wvuL/I9t5Vt3MJzlwC+WN0R6HrA4Ik1h2dmSRZTQqnCI7\nWh16y+PLaFwIfN0JkN4FrpnUBsyjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB\nAf8EBTADAQH/MB0GA1UdDgQWBBTAPDjHRwjQhNGUxqE3COHCOQrOkjAKBggqhkjO\nPQQDAgNJADBGAiEAxd/lPEKy3gt3nfZ8DX7kDaaNH8jSPgCBx3ejUs3SoaUCIQD3\nZ8dVxNvG4+Gvn28mDjWhTNLCn0BYW6JFE8eTI0xv4A==\n-----END CERTIFICATE-----\n",
|
||||
"CertIssuerSubject": "MBMxETAPBgNVBAMTCHN3YXJtLWNh",
|
||||
"CertIssuerPublicKey": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEY5r/1F13qo1zbjC+4v8j23lW3cwnOXAL5Y3RHoesDgiTWHZ2ZJFlNCqcIjtaHXrL48toXAh83QmQ3gWumdQGzA=="
|
||||
}
|
||||
},
|
||||
"Status": {
|
||||
"State": "ready",
|
||||
"Addr": "192.168.65.3"
|
||||
},
|
||||
"ManagerStatus": {
|
||||
"Leader": true,
|
||||
"Reachability": "reachable",
|
||||
"Addr": "192.168.65.3:2377"
|
||||
}
|
||||
}
|
|
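The node document above is the same shape as what `utils.get_swarm_nodes()` hands back through `node.attrs`. A minimal sketch of reading the fields the CLI's `test` action prints, assuming the JSON is stored as a local test fixture (the path is hypothetical):

```python
import json

# Hypothetical fixture path for the node document shown above.
with open("src/tests/fixtures/swarm_node.json") as fd:
    node = json.load(fd)

# The same fields the 'test' action reads from node.attrs
addr = node["Status"]["Addr"]                                # "192.168.65.3"
state = node["Status"]["State"]                              # "ready"
leader = node.get("ManagerStatus", {}).get("Leader", False)

print("{} {} {} leader={}".format(node["ID"], addr, state, leader))
```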
@ -1,13 +1,21 @@
|
|||
# Making a release
|
||||
|
||||
- Update version in setup.py
|
||||
- Update version in `setup.py`
|
||||
- Update version in `docs/conf.py`
|
||||
- Update version in `restic_compose_backup/__init__.py`
|
||||
- Build and tag image
|
||||
- push: `docker push zettaio/restic-compose-backup:<version>`
|
||||
- Ensure RTD has new docs published
|
||||
|
||||
## Example
|
||||
|
||||
When releasing a bugfix version we also need to update
|
||||
and push the `major.minor` image tag.
|
||||
|
||||
```bash
|
||||
docker build . --tag zettaio/restic-compose-backup:0.2.0
|
||||
docker push zettaio/restic-compose-backup:0.2.0
|
||||
docker build src --tag zettaio/restic-compose-backup:0.6
|
||||
docker build src --tag zettaio/restic-compose-backup:0.6.0
|
||||
|
||||
docker push zettaio/restic-compose-backup:0.6
|
||||
docker push zettaio/restic-compose-backup:0.6.0
|
||||
```
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
[pytest]
|
||||
testpaths = tests
|
||||
testpaths = src/tests
|
||||
python_files=test*.py
|
||||
addopts = -v --verbose
|
||||
|
|
|
@ -1,6 +1,13 @@
|
|||
# DON'T COMMIT THIS FILE IF YOU MODIFY IT IN DEV
|
||||
|
||||
DOCKER_BASE_URL=unix://tmp/docker.sock
|
||||
# DOCKER_HOST=unix://tmp/docker.sock
|
||||
# DOCKER_TLS_VERIFY=1
|
||||
# DOCKER_CERT_PATH=''
|
||||
|
||||
SWARM_MODE=true
|
||||
INCLUDE_PROJECT_NAME=false
|
||||
EXCLUDE_BIND_MOUNTS=false
|
||||
|
||||
RESTIC_REPOSITORY=/restic_data
|
||||
RESTIC_PASSWORD=password
|
||||
|
||||
|
@ -10,6 +17,7 @@ RESTIC_KEEP_MONTHLY=12
|
|||
RESTIC_KEEP_YEARLY=3
|
||||
|
||||
LOG_LEVEL=info
|
||||
CRON_SCHEDULE=10 2 * * *
|
||||
|
||||
# EMAIL_HOST=
|
||||
# EMAIL_PORT=
|
||||
|
|
|
@ -1,210 +0,0 @@
|
|||
import argparse
|
||||
import pprint
|
||||
import logging
|
||||
|
||||
from restic_compose_backup import (
|
||||
alerts,
|
||||
backup_runner,
|
||||
log,
|
||||
restic,
|
||||
)
|
||||
from restic_compose_backup.config import Config
|
||||
from restic_compose_backup.containers import RunningContainers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def main():
|
||||
"""CLI entrypoint"""
|
||||
args = parse_args()
|
||||
config = Config()
|
||||
log.setup(level=args.log_level or config.log_level)
|
||||
containers = RunningContainers()
|
||||
|
||||
# Ensure log level is propagated to parent container if overridden
|
||||
if args.log_level:
|
||||
containers.this_container.set_config_env('LOG_LEVEL', args.log_level)
|
||||
|
||||
if args.action == 'status':
|
||||
status(config, containers)
|
||||
|
||||
elif args.action == 'snapshots':
|
||||
snapshots(config, containers)
|
||||
|
||||
elif args.action == 'backup':
|
||||
backup(config, containers)
|
||||
|
||||
elif args.action == 'start-backup-process':
|
||||
start_backup_process(config, containers)
|
||||
|
||||
elif args.action == 'cleanup':
|
||||
cleanup(config, containers)
|
||||
|
||||
elif args.action == 'alert':
|
||||
alert(config, containers)
|
||||
|
||||
|
||||
def status(config, containers):
|
||||
"""Outputs the backup config for the compose setup"""
|
||||
logger.info("Status for compose project '%s'", containers.project_name)
|
||||
logger.info("Backup currently running?: %s", containers.backup_process_running)
|
||||
logger.info("%s Detected Config %s", "-" * 25, "-" * 25)
|
||||
|
||||
backup_containers = containers.containers_for_backup()
|
||||
for container in backup_containers:
|
||||
logger.info('service: %s', container.service_name)
|
||||
|
||||
if container.volume_backup_enabled:
|
||||
for mount in container.filter_mounts():
|
||||
logger.info(' - volume: %s', mount.source)
|
||||
|
||||
if container.database_backup_enabled:
|
||||
instance = container.instance
|
||||
ping = instance.ping()
|
||||
logger.info(' - %s (is_ready=%s)', instance.container_type, ping == 0)
|
||||
if ping != 0:
|
||||
logger.error("Database '%s' in service %s cannot be reached", instance.container_type, container.service_name)
|
||||
|
||||
if len(backup_containers) == 0:
|
||||
logger.info("No containers in the project has 'restic-compose-backup.enabled' label")
|
||||
|
||||
logger.info("-" * 67)
|
||||
|
||||
|
||||
def backup(config, containers):
|
||||
"""Request a backup to start"""
|
||||
# Make sure we don't spawn multiple backup processes
|
||||
if containers.backup_process_running:
|
||||
raise ValueError("Backup process already running")
|
||||
|
||||
logger.info("Initializing repository (may fail if already initalized)")
|
||||
|
||||
# TODO: Errors when repo already exists
|
||||
restic.init_repo(config.repository)
|
||||
|
||||
# Map all volumes from the backup container into the backup process container
|
||||
volumes = containers.this_container.volumes
|
||||
|
||||
# Map volumes from other containers we are backing up
|
||||
mounts = containers.generate_backup_mounts('/volumes')
|
||||
volumes.update(mounts)
|
||||
|
||||
result = backup_runner.run(
|
||||
image=containers.this_container.image,
|
||||
command='restic-compose-backup start-backup-process',
|
||||
volumes=volumes,
|
||||
environment=containers.this_container.environment,
|
||||
source_container_id=containers.this_container.id,
|
||||
labels={
|
||||
"restic-compose-backup.backup_process": 'True',
|
||||
"com.docker.compose.project": containers.project_name,
|
||||
},
|
||||
)
|
||||
logger.info('Backup container exit code: %s', result)
|
||||
|
||||
# Alert the user if something went wrong
|
||||
if result != 0:
|
||||
alerts.send(
|
||||
subject="Backup process exited with non-zero code",
|
||||
body=open('backup.log').read(),
|
||||
alert_type='ERROR',
|
||||
)
|
||||
|
||||
|
||||
def start_backup_process(config, containers):
|
||||
"""The actual backup process running inside the spawned container"""
|
||||
if (not containers.backup_process_container
|
||||
or containers.this_container == containers.backup_process_container is False):
|
||||
logger.error(
|
||||
"Cannot run backup process in this container. Use backup command instead. "
|
||||
"This will spawn a new container with the necessary mounts."
|
||||
)
|
||||
return
|
||||
|
||||
status(config, containers)
|
||||
errors = False
|
||||
|
||||
# Back up volumes
|
||||
try:
|
||||
logger.info('Backing up volumes')
|
||||
vol_result = restic.backup_files(config.repository, source='/volumes')
|
||||
logger.debug('Volume backup exit code: %s', vol_result)
|
||||
if vol_result != 0:
|
||||
logger.error('Backup command exited with non-zero code: %s', vol_result)
|
||||
errors = True
|
||||
except Exception as ex:
|
||||
logger.error(ex)
|
||||
errors = True
|
||||
|
||||
# back up databases
|
||||
for container in containers.containers_for_backup():
|
||||
if container.database_backup_enabled:
|
||||
try:
|
||||
instance = container.instance
|
||||
logger.info('Backing up %s in service %s', instance.container_type, instance.service_name)
|
||||
result = instance.backup()
|
||||
logger.debug('Exit code: %s', result)
|
||||
if result != 0:
|
||||
logger.error('Backup command exited with non-zero code: %s', result)
|
||||
errors = True
|
||||
except Exception as ex:
|
||||
logger.error(ex)
|
||||
errors = True
|
||||
|
||||
if errors:
|
||||
exit(1)
|
||||
|
||||
# Only run cleanup if backup was successful
|
||||
result = cleanup(config, container)
|
||||
logger.debug('cleanup exit code: %s', errors)
|
||||
if result != 0:
|
||||
exit(1)
|
||||
|
||||
|
||||
def cleanup(config, containers):
|
||||
"""Run forget / prune to minimize storage space"""
|
||||
logger.info('Forget outdated snapshots')
|
||||
forget_result = restic.forget(
|
||||
config.repository,
|
||||
config.keep_daily,
|
||||
config.keep_weekly,
|
||||
config.keep_monthly,
|
||||
config.keep_yearly,
|
||||
)
|
||||
logger.info('Prune stale data freeing storage space')
|
||||
prune_result = restic.prune(config.repository)
|
||||
return forget_result == 0 and prune_result == 0
|
||||
|
||||
def snapshots(config, containers):
|
||||
"""Display restic snapshots"""
|
||||
stdout, stderr = restic.snapshots(config.repository, last=True)
|
||||
for line in stdout.decode().split('\n'):
|
||||
print(line)
|
||||
|
||||
|
||||
def alert(config, containers):
|
||||
"""Test alerts"""
|
||||
logger.info("Testing alerts")
|
||||
alerts.send(
|
||||
subject="{}: Test Alert".format(containers.project_name),
|
||||
body="Test message",
|
||||
)
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser(prog='restic_compose_backup')
|
||||
parser.add_argument(
|
||||
'action',
|
||||
choices=['status', 'snapshots', 'backup', 'start-backup-process', 'alert', 'cleanup'],
|
||||
)
|
||||
parser.add_argument(
|
||||
'--log-level',
|
||||
default=None,
|
||||
choices=list(log.LOG_LEVELS.keys()),
|
||||
help="Log level"
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -1,37 +0,0 @@
|
|||
"""
|
||||
"""
|
||||
import smtplib
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
EMAIL_HOST = "smtp.gmail.com"
|
||||
EMAIL_PORT = 465
|
||||
EMAIL_HOST_USER = ""
|
||||
EMAIL_HOST_PASSWORD = ""
|
||||
EMAIL_SEND_TO = ['']
|
||||
|
||||
|
||||
def main():
|
||||
send_mail("Hello world!")
|
||||
|
||||
|
||||
def send_mail(text):
|
||||
msg = MIMEText(text)
|
||||
msg['Subject'] = "Message from restic-compose-backup"
|
||||
msg['From'] = EMAIL_HOST_USER
|
||||
msg['To'] = ', '.join(EMAIL_SEND_TO)
|
||||
|
||||
try:
|
||||
print("Connecting to {} port {}".format(EMAIL_HOST, EMAIL_PORT))
|
||||
server = smtplib.SMTP_SSL(EMAIL_HOST, EMAIL_PORT)
|
||||
server.ehlo()
|
||||
server.login(EMAIL_HOST_USER, EMAIL_HOST_PASSWORD)
|
||||
server.sendmail(EMAIL_HOST_USER, EMAIL_SEND_TO, msg.as_string())
|
||||
print('Email Sent')
|
||||
except Exception as e:
|
||||
print(e)
|
||||
finally:
|
||||
server.close()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
@ -1,54 +0,0 @@
|
|||
import os
|
||||
from contextlib import contextmanager
|
||||
import docker
|
||||
|
||||
from restic_compose_backup.config import Config
|
||||
|
||||
TRUE_VALUES = ['1', 'true', 'True', True, 1]
|
||||
|
||||
|
||||
def list_containers():
|
||||
"""
|
||||
List all containers.
|
||||
|
||||
Returns:
|
||||
List of raw container json data from the api
|
||||
"""
|
||||
config = Config()
|
||||
client = docker.DockerClient(base_url=config.docker_base_url)
|
||||
all_containers = client.containers.list()
|
||||
client.close()
|
||||
return [c.attrs for c in all_containers]
|
||||
|
||||
|
||||
def is_true(value):
|
||||
"""
|
||||
Evaluates the truthfullness of a bool value in container labels
|
||||
"""
|
||||
return value in TRUE_VALUES
|
||||
|
||||
|
||||
def strip_root(path):
|
||||
"""
|
||||
Removes the root slash in a path.
|
||||
Example: /srv/data becomes srv/data
|
||||
"""
|
||||
path = path.strip()
|
||||
if path.startswith('/'):
|
||||
return path[1:]
|
||||
|
||||
return path
|
||||
|
||||
|
||||
@contextmanager
|
||||
def environment(name, value):
|
||||
"""Tempset env var"""
|
||||
old_val = os.environ.get(name)
|
||||
os.environ[name] = value
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if old_val is None:
|
||||
del os.environ[name]
|
||||
else:
|
||||
os.environ[name] = old_val
|
|
@ -0,0 +1,3 @@
|
|||
tests/
|
||||
__pycache__
|
||||
.DS_Store
|
|
@ -0,0 +1,15 @@
|
|||
FROM restic/restic:0.9.6
|
||||
|
||||
RUN apk update && apk add python3 \
|
||||
dcron \
|
||||
mariadb-client \
|
||||
postgresql-client \
|
||||
mariadb-connector-c-dev
|
||||
|
||||
ADD . /restic-compose-backup
|
||||
WORKDIR /restic-compose-backup
|
||||
RUN pip3 install -U pip setuptools wheel && pip3 install -e .
|
||||
ENV XDG_CACHE_HOME=/cache
|
||||
|
||||
ENTRYPOINT []
|
||||
CMD ["./entrypoint.sh"]
|
|
@ -0,0 +1,2 @@
|
|||
10 2 * * * source /env.sh && rcb backup > /proc/1/fd/1
|
||||
|
|
@ -1,7 +1,10 @@
|
|||
#!/bin/sh
|
||||
|
||||
# Dump all env vars so we can source them in cron jobs
|
||||
printenv | sed 's/^\(.*\)$/export \1/g' > /env.sh
|
||||
rcb dump-env > /env.sh
|
||||
|
||||
# Write crontab
|
||||
rcb crontab > crontab
|
||||
|
||||
# start cron in the foreground
|
||||
crontab crontab
|
|
@ -0,0 +1 @@
|
|||
__version__ = '0.7.1'
|
|
@ -2,7 +2,6 @@ import logging
|
|||
|
||||
from restic_compose_backup.alerts.smtp import SMTPAlert
|
||||
from restic_compose_backup.alerts.discord import DiscordWebhookAlert
|
||||
from restic_compose_backup.config import Config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -24,6 +23,7 @@ def send(subject: str = None, body: str = None, alert_type: str = 'INFO'):
|
|||
)
|
||||
except Exception as ex:
|
||||
logger.error("Exception raised when sending alert [%s]: %s", instance.name, ex)
|
||||
logger.exception(ex)
|
||||
|
||||
if len(alert_classes) == 0:
|
||||
logger.info("No alerts configured")
|
||||
|
@ -36,7 +36,7 @@ def configured_alert_types():
|
|||
|
||||
for cls in BACKENDS:
|
||||
instance = cls.create_from_env()
|
||||
logger.debug("Alert backend '%s' configured: %s", cls.name, instance != None)
|
||||
logger.debug("Alert backend '%s' configured: %s", cls.name, instance is not None)
|
||||
if instance:
|
||||
entires.append(instance)
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
import os
|
||||
import logging
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests
|
||||
from restic_compose_backup.alerts.base import BaseAlert
|
||||
|
@ -31,16 +30,19 @@ class DiscordWebhookAlert(BaseAlert):
|
|||
def send(self, subject: str = None, body: str = None, alert_type: str = None):
|
||||
"""Send basic webhook request. Max embed size is 6000"""
|
||||
logger.info("Triggering discord webhook")
|
||||
# NOTE: The max title size is 256
|
||||
# The max description size is 2048
|
||||
# Total embed size limit is 6000 characters (per embed)
|
||||
data = {
|
||||
'embeds': [
|
||||
{
|
||||
'title': subject,
|
||||
'description': body[:5000],
|
||||
'title': subject[-256:],
|
||||
'description': body[-2048:] if body else "",
|
||||
},
|
||||
]
|
||||
}
|
||||
response = requests.post(self.url, params={'wait': True}, json=data)
|
||||
if response.status_code not in self.success_codes:
|
||||
log.error("Discord webhook failed: %s: %s", response.status_code, response.content)
|
||||
logger.error("Discord webhook failed: %s: %s", response.status_code, response.content)
|
||||
else:
|
||||
logger.info('Discord webhook successful')
|
|
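The send method above trims the payload to the embed limits the comments assume (256 characters for the title, 2048 for the description, 6000 per embed in total). A small standalone sketch of the same truncation, independent of the alert class:

```python
def build_embed(subject: str, body: str) -> dict:
    """Keep the tail of subject/body so the payload stays inside the limits."""
    return {
        "embeds": [
            {
                "title": (subject or "")[-256:],       # max title length
                "description": (body or "")[-2048:],   # max description length
            },
        ],
    }

payload = build_embed("Backup failed", "x" * 5000)
assert len(payload["embeds"][0]["description"]) == 2048
```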
@ -15,7 +15,7 @@ class SMTPAlert(BaseAlert):
|
|||
self.host = host
|
||||
self.port = port
|
||||
self.user = user
|
||||
self.password = password
|
||||
self.password = password or ""
|
||||
self.to = to
|
||||
|
||||
@classmethod
|
||||
|
@ -34,7 +34,7 @@ class SMTPAlert(BaseAlert):
|
|||
|
||||
@property
|
||||
def properly_configured(self) -> bool:
|
||||
return self.host and self.port and self.user and self.password and len(self.to) > 0
|
||||
return self.host and self.port and self.user and len(self.to) > 0
|
||||
|
||||
def send(self, subject: str = None, body: str = None, alert_type: str = 'INFO'):
|
||||
# send_mail("Hello world!")
|
||||
|
@ -51,6 +51,6 @@ class SMTPAlert(BaseAlert):
|
|||
server.sendmail(self.user, self.to, msg.as_string())
|
||||
logger.info('Email sent')
|
||||
except Exception as ex:
|
||||
logger.error(ex)
|
||||
logger.exception(ex)
|
||||
finally:
|
||||
server.close()
|
|
@ -1,8 +1,7 @@
|
|||
import logging
|
||||
import os
|
||||
import docker
|
||||
|
||||
from restic_compose_backup.config import Config
|
||||
from restic_compose_backup import utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -10,8 +9,7 @@ logger = logging.getLogger(__name__)
|
|||
def run(image: str = None, command: str = None, volumes: dict = None,
|
||||
environment: dict = None, labels: dict = None, source_container_id: str = None):
|
||||
logger.info("Starting backup container")
|
||||
config = Config()
|
||||
client = docker.DockerClient(base_url=config.docker_base_url)
|
||||
client = utils.docker_client()
|
||||
|
||||
container = client.containers.run(
|
||||
image,
|
||||
|
@ -19,7 +17,7 @@ def run(image: str = None, command: str = None, volumes: dict = None,
|
|||
labels=labels,
|
||||
# auto_remove=True, # We remove the container further down
|
||||
detach=True,
|
||||
environment=environment,
|
||||
environment=environment + ['BACKUP_PROCESS_CONTAINER=true'],
|
||||
volumes=volumes,
|
||||
network_mode=f'container:{source_container_id}', # Reuse original container's network stack.
|
||||
working_dir=os.getcwd(),
|
||||
|
@ -35,7 +33,13 @@ def run(image: str = None, command: str = None, volumes: dict = None,
|
|||
line = ""
|
||||
while True:
|
||||
try:
|
||||
line += next(stream).decode()
|
||||
# Make log streaming work for docker ce 17 and 18.
|
||||
# For some reason strings are returned instead of bytes.
|
||||
data = next(stream)
|
||||
if isinstance(data, bytes):
|
||||
line += data.decode()
|
||||
elif isinstance(data, str):
|
||||
line += data
|
||||
if line.endswith('\n'):
|
||||
break
|
||||
except StopIteration:
|
||||
|
@ -51,7 +55,7 @@ def run(image: str = None, command: str = None, volumes: dict = None,
|
|||
fd.write('\n')
|
||||
logger.info(line)
|
||||
|
||||
|
||||
container.wait()
|
||||
container.reload()
|
||||
logger.debug("Container ExitCode %s", container.attrs['State']['ExitCode'])
|
||||
container.remove()
|
|
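The streaming loop above exists because older docker engines (ce 17/18) yield `str` chunks from the log stream while newer ones yield `bytes`. A stripped-down sketch of that normalization, with a plain iterable standing in for the Docker log stream:

```python
def iter_log_lines(stream):
    """Yield complete lines from a stream that may mix bytes and str chunks."""
    line = ""
    for chunk in stream:
        if isinstance(chunk, bytes):
            line += chunk.decode()
        elif isinstance(chunk, str):
            line += chunk
        while "\n" in line:
            complete, line = line.split("\n", 1)
            yield complete
    if line:
        yield line

# Mixed chunk types, as seen across docker engine versions
for entry in iter_log_lines([b"starting ", "backup\n", b"done\n"]):
    print(entry)
```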
@ -0,0 +1,331 @@
|
|||
import argparse
|
||||
import os
|
||||
import logging
|
||||
|
||||
from restic_compose_backup import (
|
||||
alerts,
|
||||
backup_runner,
|
||||
log,
|
||||
restic,
|
||||
)
|
||||
from restic_compose_backup.config import Config
|
||||
from restic_compose_backup.containers import RunningContainers
|
||||
from restic_compose_backup import cron, utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def main():
|
||||
"""CLI entrypoint"""
|
||||
args = parse_args()
|
||||
config = Config()
|
||||
log.setup(level=args.log_level or config.log_level)
|
||||
containers = RunningContainers()
|
||||
|
||||
# Ensure log level is propagated to parent container if overridden
|
||||
if args.log_level:
|
||||
containers.this_container.set_config_env('LOG_LEVEL', args.log_level)
|
||||
|
||||
if args.action == 'status':
|
||||
status(config, containers)
|
||||
|
||||
elif args.action == 'snapshots':
|
||||
snapshots(config, containers)
|
||||
|
||||
elif args.action == 'backup':
|
||||
backup(config, containers)
|
||||
|
||||
elif args.action == 'start-backup-process':
|
||||
start_backup_process(config, containers)
|
||||
|
||||
elif args.action == 'cleanup':
|
||||
cleanup(config, containers)
|
||||
|
||||
elif args.action == 'alert':
|
||||
alert(config, containers)
|
||||
|
||||
elif args.action == 'version':
|
||||
import restic_compose_backup
|
||||
print(restic_compose_backup.__version__)
|
||||
|
||||
elif args.action == "crontab":
|
||||
crontab(config)
|
||||
|
||||
elif args.action == "dump-env":
|
||||
dump_env()
|
||||
|
||||
# Random test stuff here
|
||||
elif args.action == "test":
|
||||
nodes = utils.get_swarm_nodes()
|
||||
print("Swarm nodes:")
|
||||
for node in nodes:
|
||||
addr = node.attrs['Status']['Addr']
|
||||
state = node.attrs['Status']['State']
|
||||
print(' - {} {} {}'.format(node.id, addr, state))
|
||||
|
||||
|
||||
def status(config, containers):
|
||||
"""Outputs the backup config for the compose setup"""
|
||||
logger.info("Status for compose project '%s'", containers.project_name)
|
||||
logger.info("Repository: '%s'", config.repository)
|
||||
logger.info("Backup currently running?: %s", containers.backup_process_running)
|
||||
logger.info("Include project name in backup path?: %s", utils.is_true(config.include_project_name))
|
||||
logger.debug("Exclude bind mounts from backups?: %s", utils.is_true(config.exclude_bind_mounts))
|
||||
logger.info("Checking docker availability")
|
||||
|
||||
utils.list_containers()
|
||||
|
||||
if containers.stale_backup_process_containers:
|
||||
utils.remove_containers(containers.stale_backup_process_containers)
|
||||
|
||||
# Check if repository is initialized with restic snapshots
|
||||
if not restic.is_initialized(config.repository):
|
||||
logger.info("Could not get repository info. Attempting to initialize it.")
|
||||
result = restic.init_repo(config.repository)
|
||||
if result == 0:
|
||||
logger.info("Successfully initialized repository: %s", config.repository)
|
||||
else:
|
||||
logger.error("Failed to initialize repository")
|
||||
|
||||
logger.info("%s Detected Config %s", "-" * 25, "-" * 25)
|
||||
|
||||
# Start making snapshots
|
||||
backup_containers = containers.containers_for_backup()
|
||||
for container in backup_containers:
|
||||
logger.info('service: %s', container.service_name)
|
||||
|
||||
if container.volume_backup_enabled:
|
||||
for mount in container.filter_mounts():
|
||||
logger.info(
|
||||
' - volume: %s -> %s',
|
||||
mount.source,
|
||||
container.get_volume_backup_destination(mount, '/volumes'),
|
||||
)
|
||||
|
||||
if container.database_backup_enabled:
|
||||
instance = container.instance
|
||||
ping = instance.ping()
|
||||
logger.info(
|
||||
' - %s (is_ready=%s) -> %s',
|
||||
instance.container_type,
|
||||
ping == 0,
|
||||
instance.backup_destination_path(),
|
||||
)
|
||||
if ping != 0:
|
||||
logger.error("Database '%s' in service %s cannot be reached",
|
||||
instance.container_type, container.service_name)
|
||||
|
||||
if len(backup_containers) == 0:
|
||||
logger.info("No containers in the project has 'restic-compose-backup.*' label")
|
||||
|
||||
logger.info("-" * 67)
|
||||
|
||||
|
||||
def backup(config, containers):
|
||||
"""Request a backup to start"""
|
||||
# Make sure we don't spawn multiple backup processes
|
||||
if containers.backup_process_running:
|
||||
alerts.send(
|
||||
subject="Backup process container already running",
|
||||
body=(
|
||||
"A backup process container is already running. \n"
|
||||
f"Id: {containers.backup_process_container.id}\n"
|
||||
f"Name: {containers.backup_process_container.name}\n"
|
||||
),
|
||||
alert_type='ERROR',
|
||||
)
|
||||
raise RuntimeError("Backup process already running")
|
||||
|
||||
# Map all volumes from the backup container into the backup process container
|
||||
volumes = containers.this_container.volumes
|
||||
|
||||
# Map volumes from other containers we are backing up
|
||||
mounts = containers.generate_backup_mounts('/volumes')
|
||||
volumes.update(mounts)
|
||||
|
||||
logger.debug('Starting backup container with image %s', containers.this_container.image)
|
||||
try:
|
||||
result = backup_runner.run(
|
||||
image=containers.this_container.image,
|
||||
command='restic-compose-backup start-backup-process',
|
||||
volumes=volumes,
|
||||
environment=containers.this_container.environment,
|
||||
source_container_id=containers.this_container.id,
|
||||
labels={
|
||||
containers.backup_process_label: 'True',
|
||||
"com.docker.compose.project": containers.project_name,
|
||||
},
|
||||
)
|
||||
except Exception as ex:
|
||||
logger.exception(ex)
|
||||
alerts.send(
|
||||
subject="Exception during backup",
|
||||
body=str(ex),
|
||||
alert_type='ERROR',
|
||||
)
|
||||
return
|
||||
|
||||
logger.info('Backup container exit code: %s', result)
|
||||
|
||||
# Alert the user if something went wrong
|
||||
if result != 0:
|
||||
alerts.send(
|
||||
subject="Backup process exited with non-zero code",
|
||||
body=open('backup.log').read(),
|
||||
alert_type='ERROR',
|
||||
)
|
||||
|
||||
|
||||
def start_backup_process(config, containers):
|
||||
"""The actual backup process running inside the spawned container"""
|
||||
if not utils.is_true(os.environ.get('BACKUP_PROCESS_CONTAINER')):
|
||||
logger.error(
|
||||
"Cannot run backup process in this container. Use backup command instead. "
|
||||
"This will spawn a new container with the necessary mounts."
|
||||
)
|
||||
alerts.send(
|
||||
subject="Cannot run backup process in this container",
|
||||
body=(
|
||||
"Cannot run backup process in this container. Use backup command instead. "
|
||||
"This will spawn a new container with the necessary mounts."
|
||||
)
|
||||
)
|
||||
exit(1)
|
||||
|
||||
status(config, containers)
|
||||
errors = False
|
||||
|
||||
# Did we actually get any volumes mounted?
|
||||
try:
|
||||
has_volumes = os.stat('/volumes') is not None
|
||||
except FileNotFoundError:
|
||||
logger.warning("Found no volumes to back up")
|
||||
has_volumes = False
|
||||
|
||||
# Warn if there is nothing to do
|
||||
if len(containers.containers_for_backup()) == 0 and not has_volumes:
|
||||
logger.error("No containers for backup found")
|
||||
exit(1)
|
||||
|
||||
if has_volumes:
|
||||
try:
|
||||
logger.info('Backing up volumes')
|
||||
vol_result = restic.backup_files(config.repository, source='/volumes')
|
||||
logger.debug('Volume backup exit code: %s', vol_result)
|
||||
if vol_result != 0:
|
||||
logger.error('Volume backup exited with non-zero code: %s', vol_result)
|
||||
errors = True
|
||||
except Exception as ex:
|
||||
logger.error('Exception raised during volume backup')
|
||||
logger.exception(ex)
|
||||
errors = True
|
||||
|
||||
# back up databases
|
||||
logger.info('Backing up databases')
|
||||
for container in containers.containers_for_backup():
|
||||
if container.database_backup_enabled:
|
||||
try:
|
||||
instance = container.instance
|
||||
logger.info('Backing up %s in service %s', instance.container_type, instance.service_name)
|
||||
result = instance.backup()
|
||||
logger.debug('Exit code: %s', result)
|
||||
if result != 0:
|
||||
logger.error('Backup command exited with non-zero code: %s', result)
|
||||
errors = True
|
||||
except Exception as ex:
|
||||
logger.exception(ex)
|
||||
errors = True
|
||||
|
||||
if errors:
|
||||
logger.error('Exit code: %s', errors)
|
||||
exit(1)
|
||||
|
||||
# Only run cleanup if backup was successful
|
||||
result = cleanup(config, containers)
|
||||
logger.debug('cleanup exit code: %s', result)
|
||||
if result != 0:
|
||||
logger.error('cleanup exit code: %s', result)
|
||||
exit(1)
|
||||
|
||||
# Test the repository for errors
|
||||
logger.info("Checking the repository for errors")
|
||||
result = restic.check(config.repository)
|
||||
if result != 0:
|
||||
logger.error('Check exit code: %s', result)
|
||||
exit(1)
|
||||
|
||||
logger.info('Backup completed')
|
||||
|
||||
|
||||
def cleanup(config, containers):
|
||||
"""Run forget / prune to minimize storage space"""
|
||||
logger.info('Forget outdated snapshots')
|
||||
forget_result = restic.forget(
|
||||
config.repository,
|
||||
config.keep_daily,
|
||||
config.keep_weekly,
|
||||
config.keep_monthly,
|
||||
config.keep_yearly,
|
||||
)
|
||||
logger.info('Prune stale data freeing storage space')
|
||||
prune_result = restic.prune(config.repository)
|
||||
return forget_result or prune_result
|
||||
|
||||
|
||||
def snapshots(config, containers):
|
||||
"""Display restic snapshots"""
|
||||
stdout, stderr = restic.snapshots(config.repository, last=True)
|
||||
for line in stdout.decode().split('\n'):
|
||||
print(line)
|
||||
|
||||
|
||||
def alert(config, containers):
|
||||
"""Test alerts"""
|
||||
logger.info("Testing alerts")
|
||||
alerts.send(
|
||||
subject="{}: Test Alert".format(containers.project_name),
|
||||
body="Test message",
|
||||
)
|
||||
|
||||
|
||||
def crontab(config):
|
||||
"""Generate the crontab"""
|
||||
print(cron.generate_crontab(config))
|
||||
|
||||
|
||||
def dump_env():
|
||||
"""Dump all environment variables to a script that can be sourced from cron"""
|
||||
print("#!/bin/bash")
|
||||
print("# This file was generated by restic-compose-backup")
|
||||
for key, value in os.environ.items():
|
||||
print("export {}='{}'".format(key, value))
|
||||
|
||||
|
||||
def parse_args():
|
||||
parser = argparse.ArgumentParser(prog='restic_compose_backup')
|
||||
parser.add_argument(
|
||||
'action',
|
||||
choices=[
|
||||
'status',
|
||||
'snapshots',
|
||||
'backup',
|
||||
'start-backup-process',
|
||||
'alert',
|
||||
'cleanup',
|
||||
'version',
|
||||
'crontab',
|
||||
'dump-env',
|
||||
'test',
|
||||
],
|
||||
)
|
||||
parser.add_argument(
|
||||
'--log-level',
|
||||
default=None,
|
||||
choices=list(log.LOG_LEVELS.keys()),
|
||||
help="Log level"
|
||||
)
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
|
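The `dump-env` action above writes `export KEY='value'` lines that the cron job later sources. Values containing single quotes would break that output; a more defensive variant (a sketch, not the project's implementation) can lean on `shlex.quote`:

```python
import os
import shlex


def dump_env_safe():
    """Print the environment as a sourceable shell script with safe quoting."""
    print("#!/bin/bash")
    print("# This file was generated by restic-compose-backup")
    for key, value in os.environ.items():
        print("export {}={}".format(key, shlex.quote(value)))


if __name__ == '__main__':
    dump_env_safe()
```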
@ -6,7 +6,7 @@ logger = logging.getLogger(__name__)
|
|||
|
||||
|
||||
def test():
|
||||
return run_command(['ls', '/volumes'])
|
||||
return run(['ls', '/volumes'])
|
||||
|
||||
|
||||
def ping_mysql(host, port, username) -> int:
|
||||
|
@ -23,7 +23,7 @@ def ping_mysql(host, port, username) -> int:
|
|||
])
|
||||
|
||||
|
||||
def ping_mariadb(host, port, username): #, password) -> int:
|
||||
def ping_mariadb(host, port, username) -> int:
|
||||
"""Check if the mariadb is up and can be reached"""
|
||||
return run([
|
||||
'mysqladmin',
|
||||
|
@ -43,7 +43,7 @@ def ping_postgres(host, port, username, password) -> int:
|
|||
"pg_isready",
|
||||
f"--host={host}",
|
||||
f"--port={port}",
|
||||
f"--username={username}",
|
||||
f"--username={username}",
|
||||
])
|
||||
|
||||
|
||||
|
@ -53,14 +53,12 @@ def run(cmd: List[str]) -> int:
|
|||
child = Popen(cmd, stdout=PIPE, stderr=PIPE)
|
||||
stdoutdata, stderrdata = child.communicate()
|
||||
|
||||
if stdoutdata:
|
||||
logger.debug(stdoutdata.decode().strip())
|
||||
logger.debug('-' * 28)
|
||||
if stdoutdata.strip():
|
||||
log_std('stdout', stdoutdata.decode(),
|
||||
logging.DEBUG if child.returncode == 0 else logging.ERROR)
|
||||
|
||||
if stderrdata:
|
||||
logger.error('%s STDERR %s', '-' * 10, '-' * 10)
|
||||
logger.error(stderrdata.decode().strip())
|
||||
logger.error('-' * 28)
|
||||
if stderrdata.strip():
|
||||
log_std('stderr', stderrdata.decode(), logging.ERROR)
|
||||
|
||||
logger.debug("returncode %s", child.returncode)
|
||||
return child.returncode
|
||||
|
@ -71,3 +69,23 @@ def run_capture_std(cmd: List[str]) -> Tuple[str, str]:
|
|||
logger.debug('cmd: %s', ' '.join(cmd))
|
||||
child = Popen(cmd, stdout=PIPE, stderr=PIPE)
|
||||
return child.communicate()
|
||||
|
||||
|
||||
def log_std(source: str, data: str, level: int):
|
||||
if isinstance(data, bytes):
|
||||
data = data.decode()
|
||||
|
||||
if not data.strip():
|
||||
return
|
||||
|
||||
log_func = logger.debug if level == logging.DEBUG else logger.error
|
||||
log_func('%s %s %s', '-' * 10, source, '-' * 10)
|
||||
|
||||
lines = data.split('\n')
|
||||
if lines[-1] == '':
|
||||
lines.pop()
|
||||
|
||||
for line in lines:
|
||||
log_func(line)
|
||||
|
||||
log_func('-' * 28)
|
|
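`run()` returns the child's exit code and pushes its output through `log_std`, which logs stdout at DEBUG on success and at ERROR on failure. A short usage sketch, assuming the module is importable as `restic_compose_backup.commands` (the command itself is arbitrary):

```python
import logging

from restic_compose_backup import commands

logging.basicConfig(level=logging.DEBUG)

# Exit code 0 -> stdout logged at DEBUG; non-zero -> logged at ERROR
code = commands.run(['echo', 'hello'])
print('exit code:', code)

# Capture raw output instead of logging it
stdout, stderr = commands.run_capture_std(['ls', '/'])
print(stdout.decode())
```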
@ -2,12 +2,19 @@ import os
|
|||
|
||||
|
||||
class Config:
|
||||
default_backup_command = "source /env.sh && rcb backup > /proc/1/fd/1"
|
||||
default_crontab_schedule = "0 2 * * *"
|
||||
|
||||
"""Bag for config values"""
|
||||
def __init__(self, check=True):
|
||||
# Mandatory values
|
||||
self.repository = os.environ.get('RESTIC_REPOSITORY')
|
||||
self.password = os.environ.get('RESTIC_PASSWORD')
|
||||
self.docker_base_url = os.environ.get('DOCKER_BASE_URL') or "unix://tmp/docker.sock"
|
||||
self.cron_schedule = os.environ.get('CRON_SCHEDULE') or self.default_crontab_schedule
|
||||
self.cron_command = os.environ.get('CRON_COMMAND') or self.default_backup_command
|
||||
self.swarm_mode = os.environ.get('SWARM_MODE') or False
|
||||
self.include_project_name = os.environ.get('INCLUDE_PROJECT_NAME') or False
|
||||
self.exclude_bind_mounts = os.environ.get('EXCLUDE_BIND_MOUNTS') or False
|
||||
|
||||
# Log
|
||||
self.log_level = os.environ.get('LOG_LEVEL')
|
||||
|
@ -27,3 +34,6 @@ class Config:
|
|||
|
||||
if not self.password:
|
||||
raise ValueError("RESTIC_REPOSITORY env var not set")
|
||||
|
||||
|
||||
config = Config()
|
|
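`Config` is populated entirely from environment variables, with defaults for the cron schedule and command. A minimal sketch, using hypothetical values and assuming `RESTIC_REPOSITORY`/`RESTIC_PASSWORD` are set before the module is imported:

```python
import os

# Hypothetical values for illustration only
os.environ.setdefault('RESTIC_REPOSITORY', '/restic_data')
os.environ.setdefault('RESTIC_PASSWORD', 'password')

from restic_compose_backup.config import Config

config = Config()
print(config.repository)      # '/restic_data'
print(config.cron_schedule)   # '0 2 * * *' unless CRON_SCHEDULE is set
print(config.cron_command)    # the default rcb backup cron command
```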
@ -1,8 +1,12 @@
|
|||
import os
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import List
|
||||
|
||||
from restic_compose_backup import utils
|
||||
from restic_compose_backup import enums, utils
|
||||
from restic_compose_backup.config import config
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
VOLUME_TYPE_BIND = "bind"
|
||||
VOLUME_TYPE_VOLUME = "volume"
|
||||
|
@ -27,8 +31,8 @@ class Container:
|
|||
if self._labels is None:
|
||||
raise ValueError('Container meta missing Config->Labels')
|
||||
|
||||
self._include = self._parse_pattern(self.get_label('restic-compose-backup.volumes.include'))
|
||||
self._exclude = self._parse_pattern(self.get_label('restic-compose-backup.volumes.exclude'))
|
||||
self._include = self._parse_pattern(self.get_label(enums.LABEL_VOLUMES_INCLUDE))
|
||||
self._exclude = self._parse_pattern(self.get_label(enums.LABEL_VOLUMES_EXCLUDE))
|
||||
|
||||
@property
|
||||
def instance(self) -> 'Container':
|
||||
|
@ -60,15 +64,49 @@ class Container:
|
|||
"""Image name"""
|
||||
return self.get_config('Image')
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Container name"""
|
||||
return self._data['Name'].replace('/', '')
|
||||
|
||||
@property
|
||||
def service_name(self) -> str:
|
||||
"""Name of the container/service"""
|
||||
return self.get_label('com.docker.compose.service', default='') or \
|
||||
self.get_label('com.docker.swarm.service.name', default='')
|
||||
|
||||
@property
|
||||
def backup_process_label(self) -> str:
|
||||
"""str: The unique backup process label for this project"""
|
||||
return f"{enums.LABEL_BACKUP_PROCESS}-{self.project_name}"
|
||||
|
||||
@property
|
||||
def project_name(self) -> str:
|
||||
"""str: Name of the compose setup"""
|
||||
return self.get_label('com.docker.compose.project', default='')
|
||||
|
||||
@property
|
||||
def stack_name(self) -> str:
|
||||
"""str: Name of the stack is present"""
|
||||
return self.get_label("com.docker.stack.namespace")
|
||||
|
||||
@property
|
||||
def is_oneoff(self) -> bool:
|
||||
"""Was this container started with run command?"""
|
||||
return self.get_label('com.docker.compose.oneoff', default='False') == 'True'
|
||||
|
||||
@property
|
||||
def environment(self) -> list:
|
||||
"""All configured env vars for the container as a list"""
|
||||
return self.get_config('Env')
|
||||
|
||||
def remove(self):
|
||||
self._data.remove()
|
||||
|
||||
def get_config_env(self, name) -> str:
|
||||
"""Get a config environment variable by name"""
|
||||
# convert to dict and fetch env var by name
|
||||
data = {i[0:i.find('=')]: i[i.find('=')+1:] for i in self.environment}
|
||||
data = {i[0:i.find('=')]: i[i.find('=') + 1:] for i in self.environment}
|
||||
return data.get(name)
|
||||
|
||||
def set_config_env(self, name, value):
|
||||
|
@ -107,7 +145,8 @@ class Container:
|
|||
|
||||
@property
|
||||
def volume_backup_enabled(self) -> bool:
|
||||
return utils.is_true(self.get_label('restic-compose-backup.volumes'))
|
||||
"""bool: If the ``restic-compose-backup.volumes`` label is set"""
|
||||
return utils.is_true(self.get_label(enums.LABEL_VOLUMES_ENABLED))
|
||||
|
||||
@property
|
||||
def database_backup_enabled(self) -> bool:
|
||||
|
@ -120,46 +159,29 @@ class Container:
|
|||
|
||||
@property
|
||||
def mysql_backup_enabled(self) -> bool:
|
||||
return utils.is_true(self.get_label('restic-compose-backup.mysql'))
|
||||
"""bool: If the ``restic-compose-backup.mysql`` label is set"""
|
||||
return utils.is_true(self.get_label(enums.LABEL_MYSQL_ENABLED))
|
||||
|
||||
@property
|
||||
def mariadb_backup_enabled(self) -> bool:
|
||||
return utils.is_true(self.get_label('restic-compose-backup.mariadb'))
|
||||
"""bool: If the ``restic-compose-backup.mariadb`` label is set"""
|
||||
return utils.is_true(self.get_label(enums.LABEL_MARIADB_ENABLED))
|
||||
|
||||
@property
|
||||
def postgresql_backup_enabled(self) -> bool:
|
||||
return utils.is_true(self.get_label('restic-compose-backup.postgres'))
|
||||
"""bool: If the ``restic-compose-backup.postgres`` label is set"""
|
||||
return utils.is_true(self.get_label(enums.LABEL_POSTGRES_ENABLED))
|
||||
|
||||
@property
|
||||
def is_backup_process_container(self) -> bool:
|
||||
"""Is this container the running backup process?"""
|
||||
return self.get_label('restic-compose-backup.backup_process') == 'True'
|
||||
return self.get_label(self.backup_process_label) == 'True'
|
||||
|
||||
@property
|
||||
def is_running(self) -> bool:
|
||||
"""Is the container running?"""
|
||||
"""bool: Is the container running?"""
|
||||
return self._state.get('Running', False)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Container name"""
|
||||
return self._data['Name'].replace('/', '')
|
||||
|
||||
@property
|
||||
def service_name(self) -> str:
|
||||
"""Name of the container/service"""
|
||||
return self.get_label('com.docker.compose.service', default='')
|
||||
|
||||
@property
|
||||
def project_name(self) -> str:
|
||||
"""Name of the compose setup"""
|
||||
return self.get_label('com.docker.compose.project', default='')
|
||||
|
||||
@property
|
||||
def is_oneoff(self) -> bool:
|
||||
"""Was this container started with run command?"""
|
||||
return self.get_label('com.docker.compose.oneoff', default='False') == 'True'
|
||||
|
||||
def get_config(self, name, default=None):
|
||||
"""Get value from config dict"""
|
||||
return self._config.get(name, default)
|
||||
|
@ -169,14 +191,18 @@ class Container:
|
|||
return self._labels.get(name, None)
|
||||
|
||||
def filter_mounts(self):
|
||||
"""Get all mounts for this container matching include/exclude filters"""
|
||||
"""Get all mounts for this container matching include/exclude filters"""
|
||||
filtered = []
|
||||
|
||||
# If exclude_bind_mounts is true, only volume mounts are kept in the list of mounts
|
||||
exclude_bind_mounts = utils.is_true(config.exclude_bind_mounts)
|
||||
mounts = list(filter(lambda m: not exclude_bind_mounts or m.type == "volume", self._mounts))
|
||||
|
||||
if not self.volume_backup_enabled:
|
||||
return filtered
|
||||
|
||||
if self._include:
|
||||
for mount in self._mounts:
|
||||
for mount in mounts:
|
||||
for pattern in self._include:
|
||||
if pattern in mount.source:
|
||||
break
|
||||
|
@ -186,14 +212,14 @@ class Container:
|
|||
filtered.append(mount)
|
||||
|
||||
elif self._exclude:
|
||||
for mount in self._mounts:
|
||||
for mount in mounts:
|
||||
for pattern in self._exclude:
|
||||
if pattern in mount.source:
|
||||
break
|
||||
else:
|
||||
filtered.append(mount)
|
||||
else:
|
||||
return self._mounts
|
||||
return mounts
|
||||
|
||||
return filtered
|
||||
|
||||
|
@ -203,12 +229,26 @@ class Container:
|
|||
volumes = {}
|
||||
for mount in mounts:
|
||||
volumes[mount.source] = {
|
||||
'bind': str(Path(source_prefix) / self.service_name / Path(utils.strip_root(mount.destination))),
|
||||
'bind': self.get_volume_backup_destination(mount, source_prefix),
|
||||
'mode': mode,
|
||||
}
|
||||
|
||||
return volumes
|
||||
|
||||
def get_volume_backup_destination(self, mount, source_prefix) -> str:
|
||||
"""Get the destination path for backups of the given mount"""
|
||||
destination = Path(source_prefix)
|
||||
|
||||
if utils.is_true(config.include_project_name):
|
||||
project_name = self.project_name
|
||||
if project_name != '':
|
||||
destination /= project_name
|
||||
|
||||
destination /= self.service_name
|
||||
destination /= Path(utils.strip_root(mount.destination))
|
||||
|
||||
return str(destination)
|
||||
|
||||
def get_credentials(self) -> dict:
|
||||
"""dict: get credentials for the service"""
|
||||
raise NotImplementedError("Base container class doesn't implement this")
|
||||
|
@ -221,6 +261,10 @@ class Container:
|
|||
"""Back up this service"""
|
||||
raise NotImplementedError("Base container class doesn't implement this")
|
||||
|
||||
def backup_destination_path(self) -> str:
|
||||
"""Return the path backups will be saved at"""
|
||||
raise NotImplementedError("Base container class doesn't implement this")
|
||||
|
||||
def dump_command(self) -> list:
|
||||
"""list: create a dump command restic and use to send data through stdin"""
|
||||
raise NotImplementedError("Base container class doesn't implement this")
|
||||
|
@ -310,6 +354,7 @@ class RunningContainers:
|
|||
self.containers = []
|
||||
self.this_container = None
|
||||
self.backup_process_container = None
|
||||
self.stale_backup_process_containers = []
|
||||
|
||||
# Find the container we are running in.
|
||||
# If we don't have this information we cannot continue
|
||||
|
@ -320,25 +365,51 @@ class RunningContainers:
|
|||
if not self.this_container:
|
||||
raise ValueError("Cannot find metadata for backup container")
|
||||
|
||||
# Gather all containers in the current compose setup
|
||||
# Gather all running containers in the current compose setup
|
||||
for container_data in all_containers:
|
||||
container = Container(container_data)
|
||||
|
||||
# Gather stale backup process containers
|
||||
if (self.this_container.image == container.image
|
||||
and not container.is_running
|
||||
and container.is_backup_process_container):
|
||||
self.stale_backup_process_containers.append(container)
|
||||
|
||||
# We only care about running containers after this point
|
||||
if not container.is_running:
|
||||
continue
|
||||
|
||||
# Detect running backup process container
|
||||
if container.is_backup_process_container:
|
||||
self.backup_process_container = container
|
||||
|
||||
# Detect containers belonging to the current compose setup
|
||||
if (container.project_name == self.this_container.project_name
|
||||
and not container.is_oneoff):
|
||||
if container.id != self.this_container.id:
|
||||
self.containers.append(container)
|
||||
# --- Determine which containers should be evaluated
|
||||
|
||||
# If not swarm mode we need to filter in compose project
|
||||
if not config.swarm_mode:
|
||||
if container.project_name != self.this_container.project_name:
|
||||
continue
|
||||
|
||||
# Containers started manually are not included
|
||||
if container.is_oneoff:
|
||||
continue
|
||||
|
||||
# Do not include the backup process container
|
||||
if container == self.backup_process_container:
|
||||
continue
|
||||
|
||||
self.containers.append(container)
|
||||
|
||||
@property
|
||||
def project_name(self) -> str:
|
||||
"""str: Name of the compose project"""
|
||||
return self.this_container.project_name
|
||||
|
||||
@property
|
||||
def backup_process_label(self) -> str:
|
||||
"""str: The backup process label for this project"""
|
||||
return self.this_container.backup_process_label
|
||||
|
||||
@property
|
||||
def backup_process_running(self) -> bool:
|
||||
"""Is the backup process container running?"""
|
||||
|
@ -358,6 +429,7 @@ class RunningContainers:
|
|||
return mounts
|
||||
|
||||
def get_service(self, name) -> Container:
|
||||
"""Container: Get a service by name"""
|
||||
for container in self.containers:
|
||||
if container.service_name == name:
|
||||
return container
|
|
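`get_volume_backup_destination` builds the path a mount ends up under in the backup: an optional project segment (when `INCLUDE_PROJECT_NAME` is true), then the service name, then the mount destination with its leading slash stripped. A pure-path sketch of the same rule, with made-up names:

```python
from pathlib import Path


def volume_backup_destination(source_prefix, project, service,
                              mount_destination, include_project):
    """Mirror the destination rule used for volume backups."""
    destination = Path(source_prefix)
    if include_project and project:
        destination /= project
    destination /= service
    destination /= mount_destination.lstrip('/')
    return str(destination)


# -> /volumes/test/mariadb/var/lib/mysql
print(volume_backup_destination('/volumes', 'test', 'mariadb',
                                '/var/lib/mysql', include_project=True))
```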
@ -1,5 +1,7 @@
|
|||
from pathlib import Path
|
||||
|
||||
from restic_compose_backup.containers import Container
|
||||
from restic_compose_backup.config import Config
|
||||
from restic_compose_backup.config import config, Config
|
||||
from restic_compose_backup import (
|
||||
commands,
|
||||
restic,
|
||||
|
@ -39,6 +41,7 @@ class MariadbContainer(Container):
|
|||
f"--port={creds['port']}",
|
||||
f"--user={creds['username']}",
|
||||
"--all-databases",
|
||||
"--no-tablespaces",
|
||||
]
|
||||
|
||||
def backup(self):
|
||||
|
@ -48,10 +51,23 @@ class MariadbContainer(Container):
|
|||
with utils.environment('MYSQL_PWD', creds['password']):
|
||||
return restic.backup_from_stdin(
|
||||
config.repository,
|
||||
f'/databases/{self.service_name}/all_databases.sql',
|
||||
self.backup_destination_path(),
|
||||
self.dump_command(),
|
||||
)
|
||||
|
||||
def backup_destination_path(self) -> str:
|
||||
destination = Path("/databases")
|
||||
|
||||
if utils.is_true(config.include_project_name):
|
||||
project_name = self.project_name
|
||||
if project_name != "":
|
||||
destination /= project_name
|
||||
|
||||
destination /= self.service_name
|
||||
destination /= "all_databases.sql"
|
||||
|
||||
return destination
|
||||
|
||||
|
||||
class MysqlContainer(Container):
|
||||
container_type = 'mysql'
|
||||
|
@ -85,6 +101,7 @@ class MysqlContainer(Container):
|
|||
f"--port={creds['port']}",
|
||||
f"--user={creds['username']}",
|
||||
"--all-databases",
|
||||
"--no-tablespaces",
|
||||
]
|
||||
|
||||
def backup(self):
|
||||
|
@ -94,10 +111,23 @@ class MysqlContainer(Container):
|
|||
with utils.environment('MYSQL_PWD', creds['password']):
|
||||
return restic.backup_from_stdin(
|
||||
config.repository,
|
||||
f'/databases/{self.service_name}/all_databases.sql',
|
||||
self.backup_destination_path(),
|
||||
self.dump_command(),
|
||||
)
|
||||
|
||||
def backup_destination_path(self) -> str:
|
||||
destination = Path("/databases")
|
||||
|
||||
if utils.is_true(config.include_project_name):
|
||||
project_name = self.project_name
|
||||
if project_name != "":
|
||||
destination /= project_name
|
||||
|
||||
destination /= self.service_name
|
||||
destination /= "all_databases.sql"
|
||||
|
||||
return destination
|
||||
|
||||
|
||||
class PostgresContainer(Container):
|
||||
container_type = 'postgres'
|
||||
|
@ -141,6 +171,19 @@ class PostgresContainer(Container):
|
|||
with utils.environment('PGPASSWORD', creds['password']):
|
||||
return restic.backup_from_stdin(
|
||||
config.repository,
|
||||
f"/databases/{self.service_name}/{creds['database']}.sql",
|
||||
self.backup_destination_path(),
|
||||
self.dump_command(),
|
||||
)
|
||||
|
||||
def backup_destination_path(self) -> str:
|
||||
destination = Path("/databases")
|
||||
|
||||
if utils.is_true(config.include_project_name):
|
||||
project_name = self.project_name
|
||||
if project_name != "":
|
||||
destination /= project_name
|
||||
|
||||
destination /= self.service_name
|
||||
destination /= f"{self.get_credentials()['database']}.sql"
|
||||
|
||||
return destination
|
|
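All three database containers follow the same pattern: build a dump command, hand the password over through a temporary environment variable, and stream the dump into restic over stdin. A hedged sketch of the mariadb/mysql flavour with made-up credentials, only constructing the command and environment:

```python
import os

# Hypothetical credentials; the real values come from the service's env vars.
creds = {'host': 'mariadb', 'port': '3306',
         'username': 'myuser', 'password': 'mypassword'}

# Same shape as MariadbContainer.dump_command()
dump_command = [
    'mysqldump',
    f"--host={creds['host']}",
    f"--port={creds['port']}",
    f"--user={creds['username']}",
    '--all-databases',
    '--no-tablespaces',
]

# MYSQL_PWD keeps the password off the command line, which is what the
# utils.environment('MYSQL_PWD', ...) context manager achieves.
env = dict(os.environ, MYSQL_PWD=creds['password'])
print(' '.join(dump_command))
```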
@ -0,0 +1,69 @@
|
|||
"""
|
||||
# ┌───────────── minute (0 - 59)
|
||||
# │ ┌───────────── hour (0 - 23)
|
||||
# │ │ ┌───────────── day of the month (1 - 31)
|
||||
# │ │ │ ┌───────────── month (1 - 12)
|
||||
# │ │ │ │ ┌───────────── day of the week (0 - 6) (Sunday to Saturday;
|
||||
# │ │ │ │ │ 7 is also Sunday on some systems)
|
||||
# │ │ │ │ │
|
||||
# │ │ │ │ │
|
||||
# * * * * * command to execute
|
||||
"""
|
||||
QUOTE_CHARS = ['"', "'"]
|
||||
|
||||
|
||||
def generate_crontab(config):
|
||||
"""Generate a crontab entry for running backup job"""
|
||||
command = config.cron_command.strip()
|
||||
schedule = config.cron_schedule
|
||||
|
||||
if schedule:
|
||||
schedule = schedule.strip()
|
||||
schedule = strip_quotes(schedule)
|
||||
if not validate_schedule(schedule):
|
||||
schedule = config.default_crontab_schedule
|
||||
else:
|
||||
schedule = config.default_crontab_schedule
|
||||
|
||||
return f'{schedule} {command}\n'
|
||||
|
||||
|
||||
def validate_schedule(schedule: str):
|
||||
"""Validate crontab format"""
|
||||
parts = schedule.split()
|
||||
if len(parts) != 5:
|
||||
return False
|
||||
|
||||
for p in parts:
|
||||
if p != '*' and not p.isdigit():
|
||||
return False
|
||||
|
||||
minute, hour, day, month, weekday = parts
|
||||
try:
|
||||
validate_field(minute, 0, 59)
|
||||
validate_field(hour, 0, 23)
|
||||
validate_field(day, 1, 31)
|
||||
validate_field(month, 1, 12)
|
||||
validate_field(weekday, 0, 6)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def validate_field(value, min, max):
|
||||
if value == '*':
|
||||
return
|
||||
|
||||
i = int(value)
|
||||
return min <= i <= max
|
||||
|
||||
|
||||
def strip_quotes(value: str):
|
||||
"""Strip enclosing single or double quotes if present"""
|
||||
if value[0] in QUOTE_CHARS:
|
||||
value = value[1:]
|
||||
if value[-1] in QUOTE_CHARS:
|
||||
value = value[:-1]
|
||||
|
||||
return value
|
|
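`generate_crontab` strips surrounding quotes from `CRON_SCHEDULE` and falls back to the default schedule when the value is not a valid five-field crontab expression. A quick usage sketch with a stub standing in for `Config`:

```python
from restic_compose_backup import cron


class StubConfig:
    """Minimal stand-in for Config, just for this example."""
    cron_command = 'source /env.sh && rcb backup > /proc/1/fd/1'
    default_crontab_schedule = '0 2 * * *'
    cron_schedule = '"10 2 * * *"'   # quoted values are accepted


print(cron.generate_crontab(StubConfig()), end='')
# 10 2 * * * source /env.sh && rcb backup > /proc/1/fd/1

StubConfig.cron_schedule = 'every day at noon'   # invalid -> default schedule
print(cron.generate_crontab(StubConfig()), end='')
# 0 2 * * * source /env.sh && rcb backup > /proc/1/fd/1
```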
@ -0,0 +1,11 @@
|
|||
|
||||
# Labels
|
||||
LABEL_VOLUMES_ENABLED = 'restic-compose-backup.volumes'
|
||||
LABEL_VOLUMES_INCLUDE = 'restic-compose-backup.volumes.include'
|
||||
LABEL_VOLUMES_EXCLUDE = 'restic-compose-backup.volumes.exclude'
|
||||
|
||||
LABEL_MYSQL_ENABLED = 'restic-compose-backup.mysql'
|
||||
LABEL_POSTGRES_ENABLED = 'restic-compose-backup.postgres'
|
||||
LABEL_MARIADB_ENABLED = 'restic-compose-backup.mariadb'
|
||||
|
||||
LABEL_BACKUP_PROCESS = 'restic-compose-backup.process'
|
|
@ -13,6 +13,7 @@ LOG_LEVELS = {
|
|||
'error': logging.ERROR,
|
||||
}
|
||||
|
||||
|
||||
def setup(level: str = 'warning'):
|
||||
"""Set up logging"""
|
||||
level = level or ""
|
||||
|
@ -21,7 +22,7 @@ def setup(level: str = 'warning'):
|
|||
|
||||
ch = logging.StreamHandler(stream=sys.stdout)
|
||||
ch.setLevel(level)
|
||||
# ch.setFormatter(logging.Formatter(f'%(asctime)s - {HOSTNAME} - %(name)s - %(levelname)s - %(message)s'))
|
||||
# ch.setFormatter(logging.Formatter(f'%(asctime)s - {HOSTNAME} - %(levelname)s - %(message)s'))
|
||||
ch.setFormatter(logging.Formatter(f'%(asctime)s - %(levelname)s: %(message)s'))
|
||||
# ch.setFormatter(logging.Formatter('%(asctime)s - {HOSTNAME} - %(name)s - %(levelname)s - %(message)s'))
|
||||
# ch.setFormatter(logging.Formatter('%(asctime)s - {HOSTNAME} - %(levelname)s - %(message)s'))
|
||||
ch.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s: %(message)s'))
|
||||
logger.addHandler(ch)
|
|
@ -40,36 +40,46 @@ def backup_from_stdin(repository: str, filename: str, source_command: List[str])
|
|||
])
|
||||
|
||||
# pipe source command into dest command
|
||||
# NOTE: Using the default buffer size: io.DEFAULT_BUFFER_SIZE = 8192
|
||||
# We might want to tweak that to speed up large dumps.
|
||||
# Actual tests must be done.
|
||||
source_process = Popen(source_command, stdout=PIPE)
|
||||
dest_process = Popen(dest_command, stdin=source_process.stdout, stdout=PIPE, stderr=PIPE)
|
||||
source_process = Popen(source_command, stdout=PIPE, bufsize=65536)
|
||||
dest_process = Popen(dest_command, stdin=source_process.stdout, stdout=PIPE, stderr=PIPE, bufsize=65536)
|
||||
stdout, stderr = dest_process.communicate()
|
||||
|
||||
if stdout:
|
||||
for line in stdout.decode().split('\n'):
|
||||
logger.debug(line)
|
||||
|
||||
if stderr:
|
||||
for line in stderr.decode().split('\n'):
|
||||
logger.error(line)
|
||||
|
||||
# Ensure both processes exited with code 0
|
||||
source_exit, dest_exit = source_process.poll(), dest_process.poll()
|
||||
return 0 if (source_exit == 0 and dest_exit == 0) else 1
|
||||
exit_code = 0 if (source_exit == 0 and dest_exit == 0) else 1
|
||||
|
||||
if stdout:
|
||||
commands.log_std('stdout', stdout, logging.DEBUG if exit_code == 0 else logging.ERROR)
|
||||
|
||||
if stderr:
|
||||
commands.log_std('stderr', stderr, logging.ERROR)
|
||||
|
||||
return exit_code
|
||||
|
||||
|
||||
def snapshots(repository: str, last=True) -> Tuple[str, str]:
|
||||
"""Returns the stdout and stderr info"""
|
||||
args = ["snapshots"]
|
||||
if last:
|
||||
args.append('--last')
|
||||
args.append('--last')
|
||||
return commands.run_capture_std(restic(repository, args))
|
||||
|
||||
|
||||
def is_initialized(repository: str) -> bool:
|
||||
"""
|
||||
Checks if a repository is initialized using snapshots command.
|
||||
Note that this cannot separate between uninitalized repo
|
||||
and other errors, but this method is reccomended by the restic
|
||||
community.
|
||||
"""
|
||||
return commands.run(restic(repository, ["snapshots", '--last'])) == 0
|
||||
|
||||
|
||||
def forget(repository: str, daily: str, weekly: str, monthly: str, yearly: str):
|
||||
return restic(repository, [
|
||||
return commands.run(restic(repository, [
|
||||
'forget',
|
||||
'--group-by',
|
||||
'paths',
|
||||
'--keep-daily',
|
||||
daily,
|
||||
'--keep-weekly',
|
||||
|
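For context on the bufsize change above, a minimal sketch of how `backup_from_stdin` pipes a database dump straight into restic. The dump command, repository path, and filename here are illustrative placeholders, not values taken from this diff:

```python
# Illustrative invocation; the real callers build source_command from the
# database container's credentials elsewhere in the project.
exit_code = backup_from_stdin(
    repository='/restic_data',
    filename='/volumes/mariadb/dump.sql',
    source_command=['mysqldump', '--host', 'mariadb', '--user', 'myuser',
                    '--password=mypassword', 'mydb'],
)
if exit_code != 0:
    logger.error('stdin backup failed')
```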
@@ -78,18 +88,19 @@ def forget(repository: str, daily: str, weekly: str, monthly: str, yearly: str):
        monthly,
        '--keep-yearly',
        yearly,
    ])
    ]))


def prune(repository: str):
    return restic(repository, [
    return commands.run(restic(repository, [
        'prune',
    ])
    ]))


def check(repository: str):
    return commands.run(restic(repository, [
        "check",
        # "--with-cache",
    ]))
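The change wraps `forget` and `prune` in `commands.run()`, so they now return an exit code instead of an argument list. A hedged sketch of how a maintenance pass might chain these helpers; the repository path and retention values are examples only:

```python
# Example maintenance pass; retention values are illustrative.
repository = '/restic_data'

if not is_initialized(repository):
    logger.warning('Repository not initialized (or unreachable)')
else:
    forget(repository, daily='7', weekly='4', monthly='12', yearly='3')
    prune(repository)
    check(repository)
```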
@@ -0,0 +1,97 @@
import os
import logging
from typing import List, TYPE_CHECKING
from contextlib import contextmanager
import docker

if TYPE_CHECKING:
    from restic_compose_backup.containers import Container

logger = logging.getLogger(__name__)

TRUE_VALUES = ['1', 'true', 'True', True, 1]


def docker_client():
    """
    Create a docker client from the following environment variables::

        DOCKER_HOST=unix://tmp/docker.sock
        DOCKER_TLS_VERIFY=1
        DOCKER_CERT_PATH=''
    """
    # NOTE: Remove this fallback in 1.0
    if not os.environ.get('DOCKER_HOST'):
        os.environ['DOCKER_HOST'] = 'unix://tmp/docker.sock'

    return docker.from_env()


def list_containers() -> List[dict]:
    """
    List all containers.

    Returns:
        List of raw container json data from the api
    """
    client = docker_client()
    all_containers = client.containers.list(all=True)
    client.close()
    return [c.attrs for c in all_containers]


def get_swarm_nodes():
    client = docker_client()
    # NOTE: If not a swarm node docker.errors.APIError is raised
    # 503 Server Error: Service Unavailable
    # ("This node is not a swarm manager. Use "docker swarm init" or
    # "docker swarm join" to connect this node to swarm and try again.")
    try:
        return client.nodes.list()
    except docker.errors.APIError:
        return []


def remove_containers(containers: List['Container']):
    client = docker_client()
    logger.info('Attempting to delete stale backup process containers')
    for container in containers:
        logger.info(' -> deleting %s', container.name)
        try:
            c = client.containers.get(container.name)
            c.remove()
        except Exception as ex:
            logger.exception(ex)


def is_true(value):
    """
    Evaluates the truthfulness of a bool value in container labels
    """
    return value in TRUE_VALUES


def strip_root(path):
    """
    Removes the root slash in a path.
    Example: /srv/data becomes srv/data
    """
    path = path.strip()
    if path.startswith('/'):
        return path[1:]

    return path


@contextmanager
def environment(name, value):
    """Temporarily set an environment variable"""
    old_val = os.environ.get(name)
    os.environ[name] = value
    try:
        yield
    finally:
        if old_val is None:
            del os.environ[name]
        else:
            os.environ[name] = old_val
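A minimal sketch of the `environment()` context manager in use; the variable name and values are placeholders:

```python
# Temporarily override an env var, restoring the previous value afterwards.
os.environ['RESTIC_PASSWORD'] = 'original'

with environment('RESTIC_PASSWORD', 'temporary-secret'):
    assert os.environ['RESTIC_PASSWORD'] == 'temporary-secret'

assert os.environ['RESTIC_PASSWORD'] == 'original'
```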
@@ -3,12 +3,15 @@ from setuptools import setup, find_namespace_packages
setup(
    name="restic-compose-backup",
    url="https://github.com/ZettaIO/restic-compose-backup",
    version="0.3.0",
    version="0.7.1",
    author="Einar Forselv",
    author_email="eforselv@gmail.com",
    packages=find_namespace_packages(include=['restic_compose_backup']),
    packages=find_namespace_packages(include=[
        'restic_compose_backup',
        'restic_compose_backup.*',
    ]),
    install_requires=[
        'docker==3.7.2',
        'docker~=6.1.3',
    ],
    entry_points={'console_scripts': [
        'restic-compose-backup = restic_compose_backup.cli:main',
@@ -160,7 +160,7 @@
    "OpenStdin": true,
    "StdinOnce": true,
    "Env": [
        "DOCKER_BASE_URL=unix://tmp/docker.sock",
        "DOCKER_HOST=unix://tmp/docker.sock",
        "RESTIC_REPOSITORY=/tmp/backup",
        "RESTIC_PASSWORD=password",
        "PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"

@@ -1 +1,2 @@
pytest==4.3.1
tox
@@ -3,6 +3,9 @@ import os
import unittest
from unittest import mock

os.environ['RESTIC_REPOSITORY'] = "test"
os.environ['RESTIC_PASSWORD'] = "password"

from restic_compose_backup import utils
from restic_compose_backup.containers import RunningContainers
import fixtures

@@ -15,8 +18,8 @@ class ResticBackupTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        """Set up basic environment variables"""
        os.environ['RESTIC_REPOSITORY'] = "test"
        os.environ['RESTIC_PASSWORD'] = "password"
        # os.environ['RESTIC_REPOSITORY'] = "test"
        # os.environ['RESTIC_PASSWORD'] = "password"

    def createContainers(self):
        backup_hash = fixtures.generate_sha256()

@@ -77,7 +80,7 @@ class ResticBackupTests(unittest.TestCase):
        ]
        with mock.patch(list_containers_func, fixtures.containers(containers=containers)):
            result = RunningContainers()
            self.assertEqual(len(result.containers), 3, msg="Three containers expected")
            self.assertEqual(len(result.containers), 4, msg="Three containers expected")
            self.assertNotEqual(result.this_container, None, msg="No backup container found")
            web_service = result.get_service('web')
            self.assertNotEqual(web_service, None)

@@ -191,7 +194,7 @@ class ResticBackupTests(unittest.TestCase):
            {
                'service': 'backup_runner',
                'labels': {
                    'restic-compose-backup.backup_process': 'True',
                    'restic-compose-backup.process-default': 'True',
                },
            },
        ]
@@ -0,0 +1,30 @@
version: '3.7'

services:
  mariadb:
    image: mariadb:10
    labels:
      restic-compose-backup.mariadb: "true"
    environment:
      - MYSQL_ROOT_PASSWORD=my-secret-pw
      - MYSQL_DATABASE=mydb
      - MYSQL_USER=myuser
      - MYSQL_PASSWORD=mypassword
    networks:
      - global
    volumes:
      - mariadbdata:/var/lib/mysql
  files:
    image: nginx:1.17-alpine
    labels:
      restic-compose-backup.volumes: "true"
    volumes:
      - files:/srv/files

volumes:
  mariadbdata:
  files:

networks:
  global:
    external: true
@@ -0,0 +1,56 @@
# Ensure that this file does not contain non-ascii characters
# as flake8 can fail to parse the file on OS X and Windows

[tox]
skipsdist = True
setupdir={toxinidir}/src
envlist =
    py38
    pep8

[testenv]
usedevelop = True
basepython =
    py38: python3.8

deps =
    -r{toxinidir}/src/tests/requirements.txt
commands =
    ; coverage run --source=restic_compose_backup -m pytest tests/
    ; coverage report
    pytest

[testenv:pep8]
usedevelop = false
deps = flake8
basepython = python3.8
commands = flake8

[pytest]
norecursedirs = tests/* .venv/* .tox/* build/ docs/

[flake8]
# H405: multi line docstring summary not separated with an empty line
# D100: Missing docstring in public module
# D101: Missing docstring in public class
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D104: Missing docstring in public package
# D105: Missing docstring in magic method
# D200: One-line docstring should fit on one line with quotes
# D202: No blank lines allowed after function docstring
# D203: 1 blank required before class docstring.
# D204: 1 blank required after class docstring
# D205: Blank line required between one-line summary and description.
# D207: Docstring is under-indented
# D208: Docstring is over-indented
# D211: No blank lines allowed before class docstring
# D301: Use r""" if any backslashes in a docstring
# D400: First line should end with a period.
# D401: First line should be in imperative mood.
# *** E302 expected 2 blank lines, found 1
# *** W503 line break before binary operator
ignore = H405,D100,D101,D102,D103,D104,D105,D200,D202,D203,D204,D205,D211,D301,D400,D401,W503
show-source = True
max-line-length = 120
exclude = .tox,env,tests,build,conf.py